author     Ben Noordhuis <info@bnoordhuis.nl>   2014-05-12 05:07:46 +0200
committer  Fedor Indutny <fedor@indutny.com>    2014-06-12 17:46:17 -0700
commit     3a280b2034e3ea438cd3a2e7acd1a4cd40112ac5 (patch)
tree       ae194faf83fd22ad890b421c2ebd537db1a52534
parent     5413d9abe0df7e22bdb650a65f4c0ac462bbe147 (diff)
download   node-3a280b2034e3ea438cd3a2e7acd1a4cd40112ac5.tar.gz
deps: upgrade v8 to 3.26.33
Signed-off-by: Fedor Indutny <fedor@indutny.com>
835 files changed, 44922 insertions, 49480 deletions
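
For embedders picking up this roll, the most visible API change in the headers touched below (v8-debug.h, v8-profiler.h) is the removal of the implicit default isolate: Debug::DebugBreak(), CancelDebugBreak(), SendCommand() and SetLiveEditEnabled() now take an explicit Isolate*, and CpuProfiler::StartCpuProfiling()/StopCpuProfiling() are marked V8_DEPRECATED in favor of StartProfiling()/StopProfiling(). The following sketch is not part of the patch; it assumes an isolate the caller has already created and entered, and it omits error handling.

```cpp
// Sketch of embedder code adapted to the explicit-Isolate* APIs in 3.26.
// Assumes `isolate` was created and entered by the caller (not shown here).
#include "v8.h"
#include "v8-debug.h"
#include "v8-profiler.h"

void ProfileAndBreak(v8::Isolate* isolate) {
  v8::HandleScope handle_scope(isolate);

  // DebugBreak() no longer falls back to a default isolate; the isolate is
  // now a required argument (Chromium issue 359977).
  v8::Debug::DebugBreak(isolate);

  // StartCpuProfiling()/StopCpuProfiling() are deprecated by this roll; the
  // replacements are StartProfiling()/StopProfiling().
  v8::CpuProfiler* profiler = isolate->GetCpuProfiler();
  v8::Handle<v8::String> title = v8::String::NewFromUtf8(isolate, "startup");
  profiler->StartProfiling(title, true /* record_samples */);
  // ... run the script being profiled ...
  v8::CpuProfile* profile = profiler->StopProfiling(title);

  // New in this version: per-sample timestamps, relative to the same
  // unspecified starting point as GetStartTime()/GetEndTime().
  for (int i = 0; i < profile->GetSamplesCount(); ++i) {
    int64_t t = profile->GetSampleTimestamp(i);
    (void)t;  // e.g. hand off to the embedder's own tracing
  }
  profile->Delete();
}
```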
diff --git a/deps/v8/.DEPS.git b/deps/v8/.DEPS.git new file mode 100644 index 000000000..e1e6982c0 --- /dev/null +++ b/deps/v8/.DEPS.git @@ -0,0 +1,48 @@ +# DO NOT EDIT EXCEPT FOR LOCAL TESTING. +# THIS IS A GENERATED FILE. +# ALL MANUAL CHANGES WILL BE OVERWRITTEN. +# SEE http://code.google.com/p/chromium/wiki/UsingGit +# FOR HOW TO ROLL DEPS +vars = { + 'webkit_url': + 'https://chromium.googlesource.com/chromium/blink.git', + 'git_url': + 'https://chromium.googlesource.com' +} + +deps = { + 'v8/build/gyp': + Var('git_url') + '/external/gyp.git@a3e2a5caf24a1e0a45401e09ad131210bf16b852', + 'v8/third_party/icu': + Var('git_url') + '/chromium/deps/icu46.git@7a1ec88f69e25b3efcf76196d07f7815255db025', +} + +deps_os = { + 'win': + { + 'v8/third_party/cygwin': + Var('git_url') + '/chromium/deps/cygwin.git@06a117a90c15174436bfa20ceebbfdf43b7eb820', + 'v8/third_party/python_26': + Var('git_url') + '/chromium/deps/python_26.git@67d19f904470effe3122d27101cc5a8195abd157', + }, +} + +include_rules = [ + +] + +skip_child_includes = [ + +] + +hooks = [ + { + 'action': + [ + 'python', + 'v8/build/gyp_v8' +], + 'pattern': + '.' +} +] diff --git a/deps/v8/.gitignore b/deps/v8/.gitignore index de51f8a1e..ebcb5816b 100644 --- a/deps/v8/.gitignore +++ b/deps/v8/.gitignore @@ -46,6 +46,9 @@ shell_g /test/mozilla/CHECKED_OUT_VERSION /test/mozilla/data /test/mozilla/downloaded_* +/test/promises-aplus/promises-tests +/test/promises-aplus/promises-tests.tar.gz +/test/promises-aplus/sinon /test/test262/data /test/test262/tc39-test262-* /third_party @@ -63,3 +66,4 @@ GTAGS GRTAGS GSYMS GPATH +gtags.files diff --git a/deps/v8/BUILD.gn b/deps/v8/BUILD.gn new file mode 100644 index 000000000..2a6178eab --- /dev/null +++ b/deps/v8/BUILD.gn @@ -0,0 +1,873 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# TODO(jochen): These will need to be user-settable to support standalone V8 +# builds. +v8_compress_startup_data = "off" +v8_deprecation_warnings = false +v8_enable_disassembler = false +v8_enable_gdbjit = false +v8_enable_handle_zapping = true +v8_enable_i18n_support = true +v8_enable_verify_heap = false +v8_interpreted_regexp = false +v8_object_print = false +v8_postmortem_support = false +v8_use_default_platform = true +v8_use_snapshot = true + +if (is_debug) { + v8_enable_extra_checks = true +} else { + v8_enable_extra_checks = false +} + +# TODO(jochen): Add support for want_seperate_host_toolset. +# TODO(jochen): Add toolchain.gypi support. + + +############################################################################### +# Configurations +# +config("internal_config") { + visibility = ":*" # Only targets in this file can depend on this. + + include_dirs = [ "src" ] + + if (component_mode == "shared_library") { + defines = [ + "BUILDING_V8_SHARED", + "V8_SHARED", + ] + } +} + +config("features") { + visibility = ":*" # Only targets in this file can depend on this. 
+ + defines = [] + + if (v8_enable_disassembler == true) { + defines += [ + "ENABLE_DISASSEMBLER", + ] + } + if (v8_enable_gdbjit == true) { + defines += [ + "ENABLE_GDB_JIT_INTERFACE", + ] + } + if (v8_object_print == true) { + defines += [ + "OBJECT_PRINT", + ] + } + if (v8_enable_verify_heap == true) { + defines += [ + "VERIFY_HEAP", + ] + } + if (v8_interpreted_regexp == true) { + defines += [ + "V8_INTERPRETED_REGEXP", + ] + } + if (v8_deprecation_warnings == true) { + defines += [ + "V8_DEPRECATION_WARNINGS", + ] + } + if (v8_enable_i18n_support == true) { + defines += [ + "V8_I18N_SUPPORT", + ] + } + if (v8_use_default_platform == true) { + defines += [ + "V8_USE_DEFAULT_PLATFORM", + ] + } + if (v8_compress_startup_data == "bz2") { + defines += [ + "COMPRESS_STARTUP_DATA_BZ2", + ] + } + if (v8_enable_extra_checks == true) { + defines += [ + "ENABLE_EXTRA_CHECKS", + ] + } + if (v8_enable_handle_zapping == true) { + defines += [ + "ENABLE_HANDLE_ZAPPING", + ] + } +} + +############################################################################### +# Actions +# + +# TODO(jochen): Do actions need visibility settings as well? +action("generate_trig_table") { + visibility = ":*" # Only targets in this file can depend on this. + + script = "tools/generate-trig-table.py" + + outputs = [ + "$target_gen_dir/trig-table.cc" + ] + + args = rebase_path(outputs, root_build_dir) +} + +action("js2c") { + visibility = ":*" # Only targets in this file can depend on this. + + script = "tools/js2c.py" + + # The script depends on this other script, this rule causes a rebuild if it + # changes. + source_prereqs = [ "tools/jsmin.py" ] + + sources = [ + "src/runtime.js", + "src/v8natives.js", + "src/array.js", + "src/string.js", + "src/uri.js", + "src/math.js", + "src/messages.js", + "src/apinatives.js", + "src/debug-debugger.js", + "src/mirror-debugger.js", + "src/liveedit-debugger.js", + "src/date.js", + "src/json.js", + "src/regexp.js", + "src/arraybuffer.js", + "src/typedarray.js", + "src/object-observe.js", + "src/macros.py", + ] + + outputs = [ + "$target_gen_dir/libraries.cc" + ] + + if (v8_enable_i18n_support) { + sources += [ "src/i18n.js" ] + } + + args = + rebase_path(outputs, root_build_dir) + + [ "EXPERIMENTAL", v8_compress_startup_data ] + + rebase_path(sources, root_build_dir) +} + +action("js2c_experimental") { + visibility = ":*" # Only targets in this file can depend on this. + + script = "tools/js2c.py" + + # The script depends on this other script, this rule causes a rebuild if it + # changes. + source_prereqs = [ "tools/jsmin.py" ] + + sources = [ + "src/macros.py", + "src/symbol.js", + "src/proxy.js", + "src/collection.js", + "src/weak_collection.js", + "src/promise.js", + "src/generator.js", + "src/array-iterator.js", + "src/harmony-string.js", + "src/harmony-array.js", + "src/harmony-math.js", + ] + + outputs = [ + "$target_gen_dir/experimental-libraries.cc" + ] + + args = + rebase_path(outputs, root_build_dir) + + [ "CORE", v8_compress_startup_data ] + + rebase_path(sources, root_build_dir) +} + +action("postmortem-metadata") { + visibility = ":*" # Only targets in this file can depend on this. 
+ + script = "tools/gen-postmortem-metadata.py" + + sources = [ + "src/objects.h", + "src/objects-inl.h", + ] + + outputs = [ + "$target_gen_dir/debug-support.cc" + ] + + args = + rebase_path(outputs, root_build_dir) + + rebase_path(sources, root_build_dir) +} + +############################################################################### +# Source Sets (aka static libraries) +# + +source_set("v8_nosnapshot") { + visibility = ":*" # Only targets in this file can depend on this. + + deps = [ + ":js2c", + ":js2c_experimental", + ":generate_trig_table", + ":v8_base", + ] + + sources = [ + "$target_gen_dir/libraries.cc", + "$target_gen_dir/experimental-libraries.cc", + "$target_gen_dir/trig-table.cc", + "src/snapshot-empty.cc", + ] + + configs += [ ":internal_config", ":features" ] +} + +source_set("v8_base") { + visibility = ":*" # Only targets in this file can depend on this. + + sources = [ + "src/accessors.cc", + "src/accessors.h", + "src/allocation.cc", + "src/allocation.h", + "src/allocation-site-scopes.cc", + "src/allocation-site-scopes.h", + "src/allocation-tracker.cc", + "src/allocation-tracker.h", + "src/api.cc", + "src/api.h", + "src/arguments.cc", + "src/arguments.h", + "src/assembler.cc", + "src/assembler.h", + "src/assert-scope.h", + "src/assert-scope.cc", + "src/ast.cc", + "src/ast.h", + "src/atomicops.h", + "src/atomicops_internals_x86_gcc.cc", + "src/bignum-dtoa.cc", + "src/bignum-dtoa.h", + "src/bignum.cc", + "src/bignum.h", + "src/bootstrapper.cc", + "src/bootstrapper.h", + "src/builtins.cc", + "src/builtins.h", + "src/bytecodes-irregexp.h", + "src/cached-powers.cc", + "src/cached-powers.h", + "src/char-predicates-inl.h", + "src/char-predicates.h", + "src/checks.cc", + "src/checks.h", + "src/circular-queue-inl.h", + "src/circular-queue.h", + "src/code-stubs.cc", + "src/code-stubs.h", + "src/code-stubs-hydrogen.cc", + "src/code.h", + "src/codegen.cc", + "src/codegen.h", + "src/compilation-cache.cc", + "src/compilation-cache.h", + "src/compiler.cc", + "src/compiler.h", + "src/contexts.cc", + "src/contexts.h", + "src/conversions-inl.h", + "src/conversions.cc", + "src/conversions.h", + "src/counters.cc", + "src/counters.h", + "src/cpu-profiler-inl.h", + "src/cpu-profiler.cc", + "src/cpu-profiler.h", + "src/cpu.cc", + "src/cpu.h", + "src/data-flow.cc", + "src/data-flow.h", + "src/date.cc", + "src/date.h", + "src/dateparser-inl.h", + "src/dateparser.cc", + "src/dateparser.h", + "src/debug-agent.cc", + "src/debug-agent.h", + "src/debug.cc", + "src/debug.h", + "src/deoptimizer.cc", + "src/deoptimizer.h", + "src/disasm.h", + "src/disassembler.cc", + "src/disassembler.h", + "src/diy-fp.cc", + "src/diy-fp.h", + "src/double.h", + "src/dtoa.cc", + "src/dtoa.h", + "src/effects.h", + "src/elements-kind.cc", + "src/elements-kind.h", + "src/elements.cc", + "src/elements.h", + "src/execution.cc", + "src/execution.h", + "src/extensions/externalize-string-extension.cc", + "src/extensions/externalize-string-extension.h", + "src/extensions/free-buffer-extension.cc", + "src/extensions/free-buffer-extension.h", + "src/extensions/gc-extension.cc", + "src/extensions/gc-extension.h", + "src/extensions/statistics-extension.cc", + "src/extensions/statistics-extension.h", + "src/extensions/trigger-failure-extension.cc", + "src/extensions/trigger-failure-extension.h", + "src/factory.cc", + "src/factory.h", + "src/fast-dtoa.cc", + "src/fast-dtoa.h", + "src/feedback-slots.h", + "src/fixed-dtoa.cc", + "src/fixed-dtoa.h", + "src/flag-definitions.h", + "src/flags.cc", + "src/flags.h", + "src/frames-inl.h", 
+ "src/frames.cc", + "src/frames.h", + "src/full-codegen.cc", + "src/full-codegen.h", + "src/func-name-inferrer.cc", + "src/func-name-inferrer.h", + "src/gdb-jit.cc", + "src/gdb-jit.h", + "src/global-handles.cc", + "src/global-handles.h", + "src/globals.h", + "src/handles-inl.h", + "src/handles.cc", + "src/handles.h", + "src/hashmap.h", + "src/heap-inl.h", + "src/heap-profiler.cc", + "src/heap-profiler.h", + "src/heap-snapshot-generator-inl.h", + "src/heap-snapshot-generator.cc", + "src/heap-snapshot-generator.h", + "src/heap.cc", + "src/heap.h", + "src/hydrogen-alias-analysis.h", + "src/hydrogen-bce.cc", + "src/hydrogen-bce.h", + "src/hydrogen-bch.cc", + "src/hydrogen-bch.h", + "src/hydrogen-canonicalize.cc", + "src/hydrogen-canonicalize.h", + "src/hydrogen-check-elimination.cc", + "src/hydrogen-check-elimination.h", + "src/hydrogen-dce.cc", + "src/hydrogen-dce.h", + "src/hydrogen-dehoist.cc", + "src/hydrogen-dehoist.h", + "src/hydrogen-environment-liveness.cc", + "src/hydrogen-environment-liveness.h", + "src/hydrogen-escape-analysis.cc", + "src/hydrogen-escape-analysis.h", + "src/hydrogen-flow-engine.h", + "src/hydrogen-instructions.cc", + "src/hydrogen-instructions.h", + "src/hydrogen.cc", + "src/hydrogen.h", + "src/hydrogen-gvn.cc", + "src/hydrogen-gvn.h", + "src/hydrogen-infer-representation.cc", + "src/hydrogen-infer-representation.h", + "src/hydrogen-infer-types.cc", + "src/hydrogen-infer-types.h", + "src/hydrogen-load-elimination.cc", + "src/hydrogen-load-elimination.h", + "src/hydrogen-mark-deoptimize.cc", + "src/hydrogen-mark-deoptimize.h", + "src/hydrogen-mark-unreachable.cc", + "src/hydrogen-mark-unreachable.h", + "src/hydrogen-osr.cc", + "src/hydrogen-osr.h", + "src/hydrogen-range-analysis.cc", + "src/hydrogen-range-analysis.h", + "src/hydrogen-redundant-phi.cc", + "src/hydrogen-redundant-phi.h", + "src/hydrogen-removable-simulates.cc", + "src/hydrogen-removable-simulates.h", + "src/hydrogen-representation-changes.cc", + "src/hydrogen-representation-changes.h", + "src/hydrogen-sce.cc", + "src/hydrogen-sce.h", + "src/hydrogen-store-elimination.cc", + "src/hydrogen-store-elimination.h", + "src/hydrogen-uint32-analysis.cc", + "src/hydrogen-uint32-analysis.h", + "src/i18n.cc", + "src/i18n.h", + "src/icu_util.cc", + "src/icu_util.h", + "src/ic-inl.h", + "src/ic.cc", + "src/ic.h", + "src/incremental-marking.cc", + "src/incremental-marking.h", + "src/interface.cc", + "src/interface.h", + "src/interpreter-irregexp.cc", + "src/interpreter-irregexp.h", + "src/isolate.cc", + "src/isolate.h", + "src/json-parser.h", + "src/json-stringifier.h", + "src/jsregexp-inl.h", + "src/jsregexp.cc", + "src/jsregexp.h", + "src/lazy-instance.h", + # TODO(jochen): move libplatform/ files to their own target. 
+ "src/libplatform/default-platform.cc", + "src/libplatform/default-platform.h", + "src/libplatform/task-queue.cc", + "src/libplatform/task-queue.h", + "src/libplatform/worker-thread.cc", + "src/libplatform/worker-thread.h", + "src/list-inl.h", + "src/list.h", + "src/lithium-allocator-inl.h", + "src/lithium-allocator.cc", + "src/lithium-allocator.h", + "src/lithium-codegen.cc", + "src/lithium-codegen.h", + "src/lithium.cc", + "src/lithium.h", + "src/liveedit.cc", + "src/liveedit.h", + "src/log-inl.h", + "src/log-utils.cc", + "src/log-utils.h", + "src/log.cc", + "src/log.h", + "src/macro-assembler.h", + "src/mark-compact.cc", + "src/mark-compact.h", + "src/messages.cc", + "src/messages.h", + "src/msan.h", + "src/natives.h", + "src/objects-debug.cc", + "src/objects-inl.h", + "src/objects-printer.cc", + "src/objects-visiting.cc", + "src/objects-visiting.h", + "src/objects.cc", + "src/objects.h", + "src/once.cc", + "src/once.h", + "src/optimizing-compiler-thread.h", + "src/optimizing-compiler-thread.cc", + "src/parser.cc", + "src/parser.h", + "src/platform/elapsed-timer.h", + "src/platform/time.cc", + "src/platform/time.h", + "src/platform.h", + "src/platform/condition-variable.cc", + "src/platform/condition-variable.h", + "src/platform/mutex.cc", + "src/platform/mutex.h", + "src/platform/semaphore.cc", + "src/platform/semaphore.h", + "src/platform/socket.cc", + "src/platform/socket.h", + "src/preparse-data-format.h", + "src/preparse-data.cc", + "src/preparse-data.h", + "src/preparser.cc", + "src/preparser.h", + "src/prettyprinter.cc", + "src/prettyprinter.h", + "src/profile-generator-inl.h", + "src/profile-generator.cc", + "src/profile-generator.h", + "src/property-details.h", + "src/property.cc", + "src/property.h", + "src/regexp-macro-assembler-irregexp-inl.h", + "src/regexp-macro-assembler-irregexp.cc", + "src/regexp-macro-assembler-irregexp.h", + "src/regexp-macro-assembler-tracer.cc", + "src/regexp-macro-assembler-tracer.h", + "src/regexp-macro-assembler.cc", + "src/regexp-macro-assembler.h", + "src/regexp-stack.cc", + "src/regexp-stack.h", + "src/rewriter.cc", + "src/rewriter.h", + "src/runtime-profiler.cc", + "src/runtime-profiler.h", + "src/runtime.cc", + "src/runtime.h", + "src/safepoint-table.cc", + "src/safepoint-table.h", + "src/sampler.cc", + "src/sampler.h", + "src/scanner-character-streams.cc", + "src/scanner-character-streams.h", + "src/scanner.cc", + "src/scanner.h", + "src/scopeinfo.cc", + "src/scopeinfo.h", + "src/scopes.cc", + "src/scopes.h", + "src/serialize.cc", + "src/serialize.h", + "src/small-pointer-list.h", + "src/smart-pointers.h", + "src/snapshot-common.cc", + "src/snapshot.h", + "src/spaces-inl.h", + "src/spaces.cc", + "src/spaces.h", + "src/store-buffer-inl.h", + "src/store-buffer.cc", + "src/store-buffer.h", + "src/string-search.cc", + "src/string-search.h", + "src/string-stream.cc", + "src/string-stream.h", + "src/strtod.cc", + "src/strtod.h", + "src/stub-cache.cc", + "src/stub-cache.h", + "src/sweeper-thread.h", + "src/sweeper-thread.cc", + "src/token.cc", + "src/token.h", + "src/transitions-inl.h", + "src/transitions.cc", + "src/transitions.h", + "src/type-info.cc", + "src/type-info.h", + "src/types-inl.h", + "src/types.cc", + "src/types.h", + "src/typing.cc", + "src/typing.h", + "src/unbound-queue-inl.h", + "src/unbound-queue.h", + "src/unicode-inl.h", + "src/unicode.cc", + "src/unicode.h", + "src/unique.h", + "src/uri.h", + "src/utils-inl.h", + "src/utils.cc", + "src/utils.h", + "src/utils/random-number-generator.cc", + 
"src/utils/random-number-generator.h", + "src/v8.cc", + "src/v8.h", + "src/v8checks.h", + "src/v8globals.h", + "src/v8memory.h", + "src/v8threads.cc", + "src/v8threads.h", + "src/variables.cc", + "src/variables.h", + "src/version.cc", + "src/version.h", + "src/vm-state-inl.h", + "src/vm-state.h", + "src/zone-inl.h", + "src/zone.cc", + "src/zone.h", + ] + + if (cpu_arch == "x86") { + sources += [ + "src/ia32/assembler-ia32-inl.h", + "src/ia32/assembler-ia32.cc", + "src/ia32/assembler-ia32.h", + "src/ia32/builtins-ia32.cc", + "src/ia32/code-stubs-ia32.cc", + "src/ia32/code-stubs-ia32.h", + "src/ia32/codegen-ia32.cc", + "src/ia32/codegen-ia32.h", + "src/ia32/cpu-ia32.cc", + "src/ia32/debug-ia32.cc", + "src/ia32/deoptimizer-ia32.cc", + "src/ia32/disasm-ia32.cc", + "src/ia32/frames-ia32.cc", + "src/ia32/frames-ia32.h", + "src/ia32/full-codegen-ia32.cc", + "src/ia32/ic-ia32.cc", + "src/ia32/lithium-codegen-ia32.cc", + "src/ia32/lithium-codegen-ia32.h", + "src/ia32/lithium-gap-resolver-ia32.cc", + "src/ia32/lithium-gap-resolver-ia32.h", + "src/ia32/lithium-ia32.cc", + "src/ia32/lithium-ia32.h", + "src/ia32/macro-assembler-ia32.cc", + "src/ia32/macro-assembler-ia32.h", + "src/ia32/regexp-macro-assembler-ia32.cc", + "src/ia32/regexp-macro-assembler-ia32.h", + "src/ia32/stub-cache-ia32.cc", + ] + } else if (cpu_arch == "x64") { + sources += [ + "src/x64/assembler-x64-inl.h", + "src/x64/assembler-x64.cc", + "src/x64/assembler-x64.h", + "src/x64/builtins-x64.cc", + "src/x64/code-stubs-x64.cc", + "src/x64/code-stubs-x64.h", + "src/x64/codegen-x64.cc", + "src/x64/codegen-x64.h", + "src/x64/cpu-x64.cc", + "src/x64/debug-x64.cc", + "src/x64/deoptimizer-x64.cc", + "src/x64/disasm-x64.cc", + "src/x64/frames-x64.cc", + "src/x64/frames-x64.h", + "src/x64/full-codegen-x64.cc", + "src/x64/ic-x64.cc", + "src/x64/lithium-codegen-x64.cc", + "src/x64/lithium-codegen-x64.h", + "src/x64/lithium-gap-resolver-x64.cc", + "src/x64/lithium-gap-resolver-x64.h", + "src/x64/lithium-x64.cc", + "src/x64/lithium-x64.h", + "src/x64/macro-assembler-x64.cc", + "src/x64/macro-assembler-x64.h", + "src/x64/regexp-macro-assembler-x64.cc", + "src/x64/regexp-macro-assembler-x64.h", + "src/x64/stub-cache-x64.cc", + ] + } else if (cpu_arch == "arm") { + sources += [ + "src/arm/assembler-arm-inl.h", + "src/arm/assembler-arm.cc", + "src/arm/assembler-arm.h", + "src/arm/builtins-arm.cc", + "src/arm/code-stubs-arm.cc", + "src/arm/code-stubs-arm.h", + "src/arm/codegen-arm.cc", + "src/arm/codegen-arm.h", + "src/arm/constants-arm.h", + "src/arm/constants-arm.cc", + "src/arm/cpu-arm.cc", + "src/arm/debug-arm.cc", + "src/arm/deoptimizer-arm.cc", + "src/arm/disasm-arm.cc", + "src/arm/frames-arm.cc", + "src/arm/frames-arm.h", + "src/arm/full-codegen-arm.cc", + "src/arm/ic-arm.cc", + "src/arm/lithium-arm.cc", + "src/arm/lithium-arm.h", + "src/arm/lithium-codegen-arm.cc", + "src/arm/lithium-codegen-arm.h", + "src/arm/lithium-gap-resolver-arm.cc", + "src/arm/lithium-gap-resolver-arm.h", + "src/arm/macro-assembler-arm.cc", + "src/arm/macro-assembler-arm.h", + "src/arm/regexp-macro-assembler-arm.cc", + "src/arm/regexp-macro-assembler-arm.h", + "src/arm/simulator-arm.cc", + "src/arm/stub-cache-arm.cc", + ] + } else if (cpu_arch == "arm64") { + sources += [ + "src/arm64/assembler-arm64.cc", + "src/arm64/assembler-arm64.h", + "src/arm64/assembler-arm64-inl.h", + "src/arm64/builtins-arm64.cc", + "src/arm64/codegen-arm64.cc", + "src/arm64/codegen-arm64.h", + "src/arm64/code-stubs-arm64.cc", + "src/arm64/code-stubs-arm64.h", + "src/arm64/constants-arm64.h", + 
"src/arm64/cpu-arm64.cc", + "src/arm64/cpu-arm64.h", + "src/arm64/debug-arm64.cc", + "src/arm64/decoder-arm64.cc", + "src/arm64/decoder-arm64.h", + "src/arm64/decoder-arm64-inl.h", + "src/arm64/deoptimizer-arm64.cc", + "src/arm64/disasm-arm64.cc", + "src/arm64/disasm-arm64.h", + "src/arm64/frames-arm64.cc", + "src/arm64/frames-arm64.h", + "src/arm64/full-codegen-arm64.cc", + "src/arm64/ic-arm64.cc", + "src/arm64/instructions-arm64.cc", + "src/arm64/instructions-arm64.h", + "src/arm64/instrument-arm64.cc", + "src/arm64/instrument-arm64.h", + "src/arm64/lithium-arm64.cc", + "src/arm64/lithium-arm64.h", + "src/arm64/lithium-codegen-arm64.cc", + "src/arm64/lithium-codegen-arm64.h", + "src/arm64/lithium-gap-resolver-arm64.cc", + "src/arm64/lithium-gap-resolver-arm64.h", + "src/arm64/macro-assembler-arm64.cc", + "src/arm64/macro-assembler-arm64.h", + "src/arm64/macro-assembler-arm64-inl.h", + "src/arm64/regexp-macro-assembler-arm64.cc", + "src/arm64/regexp-macro-assembler-arm64.h", + "src/arm64/simulator-arm64.cc", + "src/arm64/simulator-arm64.h", + "src/arm64/stub-cache-arm64.cc", + "src/arm64/utils-arm64.cc", + "src/arm64/utils-arm64.h", + ] + } else if (cpu_arch == "mipsel") { + sources += [ + "src/mips/assembler-mips.cc", + "src/mips/assembler-mips.h", + "src/mips/assembler-mips-inl.h", + "src/mips/builtins-mips.cc", + "src/mips/codegen-mips.cc", + "src/mips/codegen-mips.h", + "src/mips/code-stubs-mips.cc", + "src/mips/code-stubs-mips.h", + "src/mips/constants-mips.cc", + "src/mips/constants-mips.h", + "src/mips/cpu-mips.cc", + "src/mips/debug-mips.cc", + "src/mips/deoptimizer-mips.cc", + "src/mips/disasm-mips.cc", + "src/mips/frames-mips.cc", + "src/mips/frames-mips.h", + "src/mips/full-codegen-mips.cc", + "src/mips/ic-mips.cc", + "src/mips/lithium-codegen-mips.cc", + "src/mips/lithium-codegen-mips.h", + "src/mips/lithium-gap-resolver-mips.cc", + "src/mips/lithium-gap-resolver-mips.h", + "src/mips/lithium-mips.cc", + "src/mips/lithium-mips.h", + "src/mips/macro-assembler-mips.cc", + "src/mips/macro-assembler-mips.h", + "src/mips/regexp-macro-assembler-mips.cc", + "src/mips/regexp-macro-assembler-mips.h", + "src/mips/simulator-mips.cc", + "src/mips/stub-cache-mips.cc", + ] + } + + configs += [ ":internal_config", ":features" ] + + defines = [] + deps = [] + + if (is_posix) { + sources += [ + "src/platform-posix.cc" + ] + } + + if (is_linux) { + sources += [ + "src/platform-linux.cc" + ] + + # TODO(brettw) + # 'conditions': [ + # ['v8_compress_startup_data=="bz2"', { + # 'libraries': [ + # '-lbz2', + # ] + # }], + # ], + + libs = [ "rt" ] + } else if (is_android) { + # TODO(brettw) OS=="android" condition from tools/gyp/v8.gyp + } else if (is_mac) { + sources += [ "src/platform-macos,cc" ] + } else if (is_win) { + sources += [ + "src/platform-win32.cc", + "src/win32-math.cc", + "src/win32-math.h", + ] + + defines += [ "_CRT_RAND_S" ] # for rand_s() + + libs = [ "winmm.lib", "ws2_32.lib" ] + } + + + if (v8_enable_i18n_support) { + deps += [ "//third_party/icu" ] + if (is_win) { + deps += [ "//third_party/icu:icudata" ] + } + } else { + sources -= [ + "src/i18n.cc", + "src/i18n.h", + ] + } + + # TODO(brettw) other conditions from v8.gyp + # TODO(brettw) icu_use_data_file_flag +} + +############################################################################### +# Executables +# + +# TODO(jochen): Remove this as soon as toolchain.gypi is integrated. 
+if (build_cpu_arch != cpu_arch) { + +executable("mksnapshot") { + sources = [ + ] +} + +} else { + +executable("mksnapshot") { + sources = [ + "src/mksnapshot.cc", + ] + + configs += [ ":internal_config", ":features" ] + + deps = [ + ":v8_base", + ":v8_nosnapshot", + ] + + if (v8_compress_startup_data == "bz2") { + libs = [ "bz2" ] + } +} + +} diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog index 879515d74..8f1d25638 100644 --- a/deps/v8/ChangeLog +++ b/deps/v8/ChangeLog @@ -1,3 +1,308 @@ +2014-05-08: Version 3.26.33 + + Removed default Isolate (Chromium issue 359977). + + Performance and stability improvements on all platforms. + + +2014-05-07: Version 3.26.32 + + Performance and stability improvements on all platforms. + + +2014-05-06: Version 3.26.31 + + Add a basic gn file for V8. + + Performance and stability improvements on all platforms. + + +2014-05-05: Version 3.26.30 + + Introduce a microtask suppression scope and move microtask methods to + isolate (Chromium issue 369503). + + Re-enable Object.observe and add enforcement for security invariants. + + Move cache line size calculation directly into CPU::FlushICache + (Chromium issue 359977). + + Generation of our home-grown memmove doesn't depend on serializer state + anymore (Chromium issue 359977). + + Fix |RunMicrotasks()| leaking reference to the last context being run + on. + + Object.defineProperty shouldn't be a hint that we're constructing a + dictionary (Chromium issue 362870). + + Performance and stability improvements on all platforms. + + +2014-05-01: Version 3.26.29 + + Added a Isolate* parameter to Serializer::enabled() (Chromium issue + 359977). + + ES6: Add support for Array.prototype.fill() (issue 3273). + + Performance and stability improvements on all platforms. + + +2014-04-29: Version 3.26.28 + + PromiseThen should ignore non-function parameters (Chromium issue + 347455). + + Performance and stability improvements on all platforms. + + +2014-04-29: Version 3.26.27 + + Error stack getter should not overwrite itself with a data property + (issue 3294). + + Performance and stability improvements on all platforms. + + +2014-04-28: Version 3.26.26 + + Expose promise value through promise mirror (issue 3093). + + Simplified CPU/CpuFeatures a bit (Chromium issue 359977). + + Performance and stability improvements on all platforms. + + +2014-04-28: Version 3.26.25 + + Add timestamps to CPU profile samples (Chromium issue 363976). + + Expose promise status through promise mirror (issue 3093). + + Remove static CallCompletedCallback handlers. + + Added an Isolate* field to NoTrackDoubleFieldsForSerializerScope, + PlatformFeatureScope and BinaryOpIC::State (Chromium issue 359977). + + Trigger debug event on not yet caught exception in promises (issue + 3093). + + Unbreak vtunejit=on (issue 3288). + + Performance and stability improvements on all platforms. + + +2014-04-25: Version 3.26.24 + + MIPS: CodeStubs contain their corresponding Isolate* now. (part 2) + (Chromium issue 359977). + + MIPS: CodeStubs contain their corresponding Isolate* now. (part 1) + (Chromium issue 359977). + + CodeStubs contain their corresponding Isolate* now. (part 2) (Chromium + issue 359977). + + Make DescriptorArray::IsMoreGeneralThan() and DescriptorArray::Merge() + compatible again (Chromium issue 365172). + + CodeStubs contain their corresponding Isolate* now. (part 1) (Chromium + issue 359977). + + Performance and stability improvements on all platforms. 
+ + +2014-04-24: Version 3.26.23 + + Performance and stability improvements on all platforms. + + +2014-04-23: Version 3.26.22 + + Disable field type tracking by default (Chromium issue 365172). + + Performance and stability improvements on all platforms. + + +2014-04-23: Version 3.26.21 + + Context-allocate all parameters in generators (issue 3280). + + Simplify v8/Isolate teardown (Chromium issue 359977). + + Performance and stability improvements on all platforms. + + +2014-04-21: Version 3.26.20 + + ES6: Add support for Map/Set forEach (Chromium issues 1793, 2323). + + Performance and stability improvements on all platforms. + + +2014-04-18: Version 3.26.19 + + ES6: Add support for Map/Set forEach (Chromium issues 1793, 2323). + + Performance and stability improvements on all platforms. + + +2014-04-17: Version 3.26.18 + + Removed Isolate::EnterDefaultIsolate (Chromium issue 359977). + + Performance and stability improvements on all platforms. + + +2014-04-16: Version 3.26.17 + + Clear invalid field maps in PropertyAccessInfo (Chromium issue 363956). + + ES6: Add support for Map/Set forEach (Chromium issues 1793, 2323). + + Performance and stability improvements on all platforms. + + +2014-04-16: Version 3.26.16 + + Removed EnterIsolateIfNeeded and a soon-to-be-useless assertion + (Chromium issue 359977). + + Removed GetDefaultIsolate{Debugger,ForLocking,StackGuard} (Chromium + issue 359977). + + Performance and stability improvements on all platforms. + + +2014-04-15: Version 3.26.15 + + Fix result of LCodeGen::DoWrapReceiver for strict functions and builtins + (Chromium issue 362128). + + Performance and stability improvements on all platforms. + + +2014-04-15: Version 3.26.14 + + Performance and stability improvements on all platforms. + + +2014-04-14: Version 3.26.13 + + Make maps in monomorphic IC stubs weak (issue 2073). + + x64: Make sure that the upper half of a 64bit register contains 0 for + int32 values (Chromium issue 360611). + + Performance and stability improvements on all platforms. + + +2014-04-11: Version 3.26.12 + + Do not use ranges after range analysis (Chromium issue 361608). + + Performance and stability improvements on all platforms. + + +2014-04-10: Version 3.26.11 + + Performance and stability improvements on all platforms. + + +2014-04-10: Version 3.26.10 + + Allow the embedder to pass the virtual memory limit to v8. + + Performance and stability improvements on all platforms. + + +2014-04-09: Version 3.26.9 + + Fix invalid local property lookup for transitions (Chromium issue + 361025). + + MIPS: Fixed flooring division by -1 (issue 3259). + + Fixed flooring division by -1 on ARM (issue 3259). + + Make `String.prototype.contains` throw when passing a regular expression + (issue 3261). + + Performance and stability improvements on all platforms. + + +2014-04-08: Version 3.26.8 + + Yet another regression test for range analysis (issue 3204). + + Performance and stability improvements on all platforms. + + +2014-04-07: Version 3.26.7 + + Performance and stability improvements on all platforms. + + +2014-04-04: Version 3.26.6 + + Performance and stability improvements on all platforms. + + +2014-04-03: Version 3.26.5 + + Performance and stability improvements on all platforms. + + +2014-04-03: Version 3.26.4 + + Make stray 'return' an early error. + + Show references from weak containers as weak in heap snapshots (Chromium + issue 356590). + + Make invalid LHSs that are calls late errors (Chromium issue 358346). 
+ + Performance and stability improvements on all platforms. + + +2014-04-02: Version 3.26.3 + + Support typed arrays in IsMoreGeneralElementsKindTransition (Chromium + issue 357054). + + Remove debugger_auto_break flag. + + Store i18n meta data in hidden symbols instead of js accessible + properties (Chromium issue 354967). + + Performance and stability improvements on all platforms. + + +2014-04-01: Version 3.26.2 + + Performance and stability improvements on all platforms. + + +2014-04-01: Version 3.26.1 + + Fix Type::Intersect to skip uninhabited bitsets (Chromium issue 357330). + + Fix PrepareKeyedOperand on arm (Chromium issue 358057). + + Performance and stability improvements on all platforms. + + +2014-03-31: Version 3.26.0 + + Deprecate Start/StopCpuProfiling methods (issue 3213). + + Don't crash if we get a timezone change notification on an uninitialized + isolate (Chromium issue 357362). + + Performance and stability improvements on all platforms. + + 2014-03-28: Version 3.25.30 NativeContext::map_cache reference should be strong in heap snapshots diff --git a/deps/v8/DEPS b/deps/v8/DEPS index 353c5c8b3..24b784158 100644 --- a/deps/v8/DEPS +++ b/deps/v8/DEPS @@ -8,7 +8,7 @@ deps = { "http://gyp.googlecode.com/svn/trunk@1831", "v8/third_party/icu": - "https://src.chromium.org/chrome/trunk/deps/third_party/icu46@258359", + "https://src.chromium.org/svn/trunk/deps/third_party/icu46@258359", } deps_os = { diff --git a/deps/v8/Makefile b/deps/v8/Makefile index cdf5d7483..a99b09c07 100644 --- a/deps/v8/Makefile +++ b/deps/v8/Makefile @@ -96,10 +96,6 @@ endif ifeq ($(optdebug), on) GYPFLAGS += -Dv8_optimized_debug=2 endif -# debuggersupport=off -ifeq ($(debuggersupport), off) - GYPFLAGS += -Dv8_enable_debugger_support=0 -endif # unalignedaccess=on ifeq ($(unalignedaccess), on) GYPFLAGS += -Dv8_can_use_unaligned_accesses=true @@ -140,9 +136,9 @@ endif # asan=/path/to/clang++ ifneq ($(strip $(asan)),) GYPFLAGS += -Dasan=1 - export CXX="$(asan)" - export CXX_host="$(asan)" - export LINK="$(asan)" + export CXX=$(asan) + export CXX_host=$(asan) + export LINK=$(asan) export ASAN_SYMBOLIZER_PATH="$(dir $(asan))llvm-symbolizer" endif @@ -232,7 +228,7 @@ endif # Architectures and modes to be compiled. Consider these to be internal # variables, don't override them (use the targets instead). -ARCHES = ia32 x64 arm arm64 mipsel +ARCHES = ia32 x64 arm arm64 mips mipsel DEFAULT_ARCHES = ia32 x64 arm MODES = release debug optdebug DEFAULT_MODES = release debug @@ -281,10 +277,6 @@ buildbot: $(MAKE) -C "$(OUTDIR)" BUILDTYPE=$(BUILDTYPE) \ builddir="$(abspath $(OUTDIR))/$(BUILDTYPE)" -mips mips.release mips.debug: - @echo "V8 does not support big-endian MIPS builds at the moment," \ - "please use little-endian builds (mipsel)." - # Compile targets. MODES and ARCHES are convenience targets. .SECONDEXPANSION: $(MODES): $(addsuffix .$$@,$(DEFAULT_ARCHES)) @@ -409,7 +401,7 @@ native.clean: rm -rf $(OUTDIR)/native find $(OUTDIR) -regex '.*\(host\|target\)\.native\.mk' -delete -clean: $(addsuffix .clean, $(ARCHES) $(ANDROID_ARCHES) $(NACL_ARCHES)) native.clean +clean: $(addsuffix .clean, $(ARCHES) $(ANDROID_ARCHES) $(NACL_ARCHES)) native.clean gtags.clean # GYP file generation targets. OUT_MAKEFILES = $(addprefix $(OUTDIR)/Makefile.,$(BUILDS)) @@ -467,6 +459,18 @@ grokdump: ia32.release @cat $(DUMP_FILE).tmpl > $(DUMP_FILE) @$(OUTDIR)/ia32.release/d8 --dump-heap-constants >> $(DUMP_FILE) +# Support for the GNU GLOBAL Source Code Tag System. 
+gtags.files: $(GYPFILES) $(ENVFILE) + @find include src test -name '*.h' -o -name '*.cc' -o -name '*.c' > $@ + +# We need to manually set the stack limit here, to work around bugs in +# gmake-3.81 and global-5.7.1 on recent 64-bit Linux systems. +GPATH GRTAGS GSYMS GTAGS: gtags.files $(shell cat gtags.files 2> /dev/null) + @bash -c 'ulimit -s 10240 && GTAGSFORCECPP=yes gtags -i -q -f $<' + +gtags.clean: + rm -f gtags.files GPATH GRTAGS GSYMS GTAGS + # Dependencies. # Remember to keep these in sync with the DEPS file. dependencies: diff --git a/deps/v8/Makefile.android b/deps/v8/Makefile.android index f4e144f28..396b58d74 100644 --- a/deps/v8/Makefile.android +++ b/deps/v8/Makefile.android @@ -47,20 +47,20 @@ else endif ifeq ($(ARCH), android_arm) - DEFINES = target_arch=arm v8_target_arch=arm android_target_arch=arm + DEFINES = target_arch=arm v8_target_arch=arm android_target_arch=arm android_target_platform=14 DEFINES += arm_neon=0 arm_version=7 TOOLCHAIN_ARCH = arm-linux-androideabi TOOLCHAIN_PREFIX = $(TOOLCHAIN_ARCH) TOOLCHAIN_VER = 4.6 else ifeq ($(ARCH), android_arm64) - DEFINES = target_arch=arm64 v8_target_arch=arm64 android_target_arch=arm64 + DEFINES = target_arch=arm64 v8_target_arch=arm64 android_target_arch=arm64 android_target_platform=20 TOOLCHAIN_ARCH = aarch64-linux-android TOOLCHAIN_PREFIX = $(TOOLCHAIN_ARCH) TOOLCHAIN_VER = 4.8 else ifeq ($(ARCH), android_mipsel) - DEFINES = target_arch=mipsel v8_target_arch=mipsel + DEFINES = target_arch=mipsel v8_target_arch=mipsel android_target_platform=14 DEFINES += android_target_arch=mips mips_arch_variant=mips32r2 TOOLCHAIN_ARCH = mipsel-linux-android TOOLCHAIN_PREFIX = $(TOOLCHAIN_ARCH) @@ -68,7 +68,7 @@ else else ifeq ($(ARCH), android_ia32) - DEFINES = target_arch=ia32 v8_target_arch=ia32 android_target_arch=x86 + DEFINES = target_arch=ia32 v8_target_arch=ia32 android_target_arch=x86 android_target_platform=14 TOOLCHAIN_ARCH = x86 TOOLCHAIN_PREFIX = i686-linux-android TOOLCHAIN_VER = 4.6 diff --git a/deps/v8/Makefile.nacl b/deps/v8/Makefile.nacl index fc3eb28ae..1d34a3b30 100644 --- a/deps/v8/Makefile.nacl +++ b/deps/v8/Makefile.nacl @@ -77,6 +77,9 @@ GYPENV += host_os=${HOST_OS} # ICU doesn't support NaCl. GYPENV += v8_enable_i18n_support=0 +# Disable strict aliasing - v8 code often relies on undefined behavior of C++. +GYPENV += v8_no_strict_aliasing=1 + NACL_MAKEFILES = $(addprefix $(OUTDIR)/Makefile.,$(NACL_BUILDS)) .SECONDEXPANSION: # For some reason the $$(basename $$@) expansion didn't work here... 
diff --git a/deps/v8/OWNERS b/deps/v8/OWNERS index 186fc10db..2fbb3ef2a 100644 --- a/deps/v8/OWNERS +++ b/deps/v8/OWNERS @@ -11,6 +11,7 @@ machenbach@chromium.org marja@chromium.org mstarzinger@chromium.org mvstanton@chromium.org +rmcilroy@chromium.org rossberg@chromium.org svenpanne@chromium.org titzer@chromium.org diff --git a/deps/v8/PRESUBMIT.py b/deps/v8/PRESUBMIT.py index 4f7a96009..41d79eb53 100644 --- a/deps/v8/PRESUBMIT.py +++ b/deps/v8/PRESUBMIT.py @@ -103,6 +103,13 @@ def CheckChangeOnCommit(input_api, output_api): def GetPreferredTryMasters(project, change): return { 'tryserver.v8': { + 'v8_linux_rel': set(['defaulttests']), + 'v8_linux_dbg': set(['defaulttests']), + 'v8_linux_nosnap_rel': set(['defaulttests']), + 'v8_linux_nosnap_dbg': set(['defaulttests']), + 'v8_linux64_rel': set(['defaulttests']), + 'v8_linux_arm_dbg': set(['defaulttests']), + 'v8_linux_arm64_rel': set(['defaulttests']), 'v8_mac_rel': set(['defaulttests']), 'v8_win_rel': set(['defaulttests']), }, diff --git a/deps/v8/build/android.gypi b/deps/v8/build/android.gypi index 9570f444f..73ac93a43 100644 --- a/deps/v8/build/android.gypi +++ b/deps/v8/build/android.gypi @@ -51,7 +51,7 @@ 'android_stlport_libs': '<(android_stlport)/libs', }, { 'variables': { - 'android_sysroot': '<(android_ndk_root)/platforms/android-9/arch-<(android_target_arch)', + 'android_sysroot': '<(android_ndk_root)/platforms/android-<(android_target_platform)/arch-<(android_target_arch)', 'android_stlport': '<(android_ndk_root)/sources/cxx-stl/stlport/', }, 'android_include': '<(android_sysroot)/usr/include', diff --git a/deps/v8/build/features.gypi b/deps/v8/build/features.gypi index f0e721209..d542d05bb 100644 --- a/deps/v8/build/features.gypi +++ b/deps/v8/build/features.gypi @@ -31,8 +31,6 @@ 'variables': { 'v8_compress_startup_data%': 'off', - 'v8_enable_debugger_support%': 1, - 'v8_enable_disassembler%': 0, 'v8_enable_gdbjit%': 0, @@ -64,9 +62,6 @@ }, 'target_defaults': { 'conditions': [ - ['v8_enable_debugger_support==1', { - 'defines': ['ENABLE_DEBUGGER_SUPPORT',], - }], ['v8_enable_disassembler==1', { 'defines': ['ENABLE_DISASSEMBLER',], }], @@ -98,7 +93,8 @@ }], ], # conditions 'configurations': { - 'Debug': { + 'DebugBaseCommon': { + 'abstract': 1, 'variables': { 'v8_enable_extra_checks%': 1, 'v8_enable_handle_zapping%': 1, diff --git a/deps/v8/build/gyp_v8 b/deps/v8/build/gyp_v8 index f2a60d1b2..bc733dfca 100755 --- a/deps/v8/build/gyp_v8 +++ b/deps/v8/build/gyp_v8 @@ -158,7 +158,8 @@ if __name__ == '__main__': # Generate for the architectures supported on the given platform. gyp_args = list(args) - if platform.system() == 'Linux': + gyp_generators = os.environ.get('GYP_GENERATORS') + if platform.system() == 'Linux' and gyp_generators != 'ninja': # Work around for crbug.com/331475. for f in glob.glob(os.path.join(v8_root, 'out', 'Makefile.*')): os.unlink(f) diff --git a/deps/v8/build/standalone.gypi b/deps/v8/build/standalone.gypi index 6ff0170b9..befa73851 100644 --- a/deps/v8/build/standalone.gypi +++ b/deps/v8/build/standalone.gypi @@ -55,8 +55,8 @@ '<!(uname -m | sed -e "s/i.86/ia32/;\ s/x86_64/x64/;\ s/amd64/x64/;\ - s/aarch64/arm64/;\ s/arm.*/arm/;\ + s/aarch64/arm64/;\ s/mips.*/mipsel/")', }, { # OS!="linux" and OS!="freebsd" and OS!="openbsd" and @@ -135,9 +135,15 @@ }, 'default_configuration': 'Debug', 'configurations': { - 'Debug': { + 'DebugBaseCommon': { 'cflags': [ '-g', '-O0' ], }, + 'Optdebug': { + 'inherit_from': [ 'DebugBaseCommon', 'DebugBase2' ], + }, + 'Debug': { + # Xcode insists on this empty entry. 
+ }, 'Release': { # Xcode insists on this empty entry. }, @@ -321,7 +327,6 @@ 'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES', 'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden 'GCC_THREADSAFE_STATICS': 'NO', # -fno-threadsafe-statics - 'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES', # -Wnewline-eof 'GCC_WARN_NON_VIRTUAL_DESTRUCTOR': 'YES', # -Wnon-virtual-dtor # MACOSX_DEPLOYMENT_TARGET maps to -mmacosx-version-min 'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)', diff --git a/deps/v8/build/toolchain.gypi b/deps/v8/build/toolchain.gypi index 4a70d6f7a..a9958ce8d 100644 --- a/deps/v8/build/toolchain.gypi +++ b/deps/v8/build/toolchain.gypi @@ -278,6 +278,57 @@ 'V8_TARGET_ARCH_IA32', ], }], # v8_target_arch=="ia32" + ['v8_target_arch=="mips"', { + 'defines': [ + 'V8_TARGET_ARCH_MIPS', + ], + 'variables': { + 'mipscompiler': '<!($(echo <(CXX)) -v 2>&1 | grep -q "^Target: mips" && echo "yes" || echo "no")', + }, + 'conditions': [ + ['mipscompiler=="yes"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags': ['-EB'], + 'ldflags': ['-EB'], + 'conditions': [ + [ 'v8_use_mips_abi_hardfloat=="true"', { + 'cflags': ['-mhard-float'], + 'ldflags': ['-mhard-float'], + }, { + 'cflags': ['-msoft-float'], + 'ldflags': ['-msoft-float'], + }], + ['mips_arch_variant=="mips32r2"', { + 'cflags': ['-mips32r2', '-Wa,-mips32r2'], + }], + ['mips_arch_variant=="mips32r1"', { + 'cflags': ['-mips32', '-Wa,-mips32'], + }], + ], + }], + ], + }], + [ 'v8_can_use_fpu_instructions=="true"', { + 'defines': [ + 'CAN_USE_FPU_INSTRUCTIONS', + ], + }], + [ 'v8_use_mips_abi_hardfloat=="true"', { + 'defines': [ + '__mips_hard_float=1', + 'CAN_USE_FPU_INSTRUCTIONS', + ], + }, { + 'defines': [ + '__mips_soft_float=1' + ], + }], + ['mips_arch_variant=="mips32r2"', { + 'defines': ['_MIPS_ARCH_MIPS32R2',], + }], + ], + }], # v8_target_arch=="mips" ['v8_target_arch=="mipsel"', { 'defines': [ 'V8_TARGET_ARCH_MIPS', @@ -380,7 +431,7 @@ ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris" \ or OS=="netbsd" or OS=="mac" or OS=="android" or OS=="qnx") and \ (v8_target_arch=="arm" or v8_target_arch=="ia32" or \ - v8_target_arch=="mipsel")', { + v8_target_arch=="mips" or v8_target_arch=="mipsel")', { # Check whether the host compiler and target compiler support the # '-m32' option and set it if so. 'target_conditions': [ @@ -445,138 +496,154 @@ }], ], # conditions 'configurations': { - 'Debug': { - 'defines': [ - 'ENABLE_DISASSEMBLER', - 'V8_ENABLE_CHECKS', - 'OBJECT_PRINT', - 'VERIFY_HEAP', - 'DEBUG' - ], + # Abstract configuration for v8_optimized_debug == 0. 
+ 'DebugBase0': { + 'abstract': 1, 'msvs_settings': { 'VCCLCompilerTool': { + 'Optimization': '0', 'conditions': [ - ['v8_optimized_debug==0', { - 'Optimization': '0', - 'conditions': [ - ['component=="shared_library"', { - 'RuntimeLibrary': '3', # /MDd - }, { - 'RuntimeLibrary': '1', # /MTd - }], - ], - }], - ['v8_optimized_debug==1', { - 'Optimization': '1', - 'InlineFunctionExpansion': '2', - 'EnableIntrinsicFunctions': 'true', - 'FavorSizeOrSpeed': '0', - 'StringPooling': 'true', - 'BasicRuntimeChecks': '0', - 'conditions': [ - ['component=="shared_library"', { - 'RuntimeLibrary': '3', # /MDd - }, { - 'RuntimeLibrary': '1', # /MTd - }], - ], - }], - ['v8_optimized_debug==2', { - 'Optimization': '2', - 'InlineFunctionExpansion': '2', - 'EnableIntrinsicFunctions': 'true', - 'FavorSizeOrSpeed': '0', - 'StringPooling': 'true', - 'BasicRuntimeChecks': '0', - 'conditions': [ - ['component=="shared_library"', { - 'RuntimeLibrary': '3', #/MDd - }, { - 'RuntimeLibrary': '1', #/MTd - }], - ['v8_target_arch=="x64"', { - # TODO(2207): remove this option once the bug is fixed. - 'WholeProgramOptimization': 'true', - }], - ], + ['component=="shared_library"', { + 'RuntimeLibrary': '3', # /MDd + }, { + 'RuntimeLibrary': '1', # /MTd }], ], }, 'VCLinkerTool': { + 'LinkIncremental': '2', + }, + }, + 'conditions': [ + ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd" or \ + OS=="qnx"', { + 'cflags!': [ + '-O0', + '-O3', + '-O2', + '-O1', + '-Os', + ], + 'cflags': [ + '-fdata-sections', + '-ffunction-sections', + ], + }], + ['OS=="mac"', { + 'xcode_settings': { + 'GCC_OPTIMIZATION_LEVEL': '0', # -O0 + }, + }], + ], + }, # DebugBase0 + # Abstract configuration for v8_optimized_debug == 1. + 'DebugBase1': { + 'abstract': 1, + 'msvs_settings': { + 'VCCLCompilerTool': { + 'Optimization': '1', + 'InlineFunctionExpansion': '2', + 'EnableIntrinsicFunctions': 'true', + 'FavorSizeOrSpeed': '0', + 'StringPooling': 'true', + 'BasicRuntimeChecks': '0', 'conditions': [ - ['v8_optimized_debug==0', { - 'LinkIncremental': '2', - }], - ['v8_optimized_debug==1', { - 'LinkIncremental': '2', - }], - ['v8_optimized_debug==2', { - 'LinkIncremental': '1', - 'OptimizeReferences': '2', - 'EnableCOMDATFolding': '2', + ['component=="shared_library"', { + 'RuntimeLibrary': '3', # /MDd + }, { + 'RuntimeLibrary': '1', # /MTd }], ], }, + 'VCLinkerTool': { + 'LinkIncremental': '2', + }, }, 'conditions': [ ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd" or \ OS=="qnx"', { - 'cflags': [ '-Wall', '<(werror)', '-W', '-Wno-unused-parameter', - '-Wnon-virtual-dtor', '-Woverloaded-virtual', - '<(wno_array_bounds)', - ], + 'cflags!': [ + '-O0', + '-O3', # TODO(2807) should be -O1. + '-O2', + '-Os', + ], + 'cflags': [ + '-fdata-sections', + '-ffunction-sections', + '-O1', # TODO(2807) should be -O3. + ], 'conditions': [ - ['v8_optimized_debug==0', { - 'cflags!': [ - '-O0', - '-O3', - '-O2', - '-O1', - '-Os', - ], + ['gcc_version==44 and clang==0', { 'cflags': [ - '-fdata-sections', - '-ffunction-sections', + # Avoid crashes with gcc 4.4 in the v8 test suite. + '-fno-tree-vrp', ], }], - ['v8_optimized_debug==1', { - 'cflags!': [ - '-O0', - '-O3', # TODO(2807) should be -O1. - '-O2', - '-Os', - ], - 'cflags': [ - '-fdata-sections', - '-ffunction-sections', - '-O1', # TODO(2807) should be -O3. - ], + ], + }], + ['OS=="mac"', { + 'xcode_settings': { + 'GCC_OPTIMIZATION_LEVEL': '3', # -O3 + 'GCC_STRICT_ALIASING': 'YES', + }, + }], + ], + }, # DebugBase1 + # Abstract configuration for v8_optimized_debug == 2. 
+ 'DebugBase2': { + 'abstract': 1, + 'msvs_settings': { + 'VCCLCompilerTool': { + 'Optimization': '2', + 'InlineFunctionExpansion': '2', + 'EnableIntrinsicFunctions': 'true', + 'FavorSizeOrSpeed': '0', + 'StringPooling': 'true', + 'BasicRuntimeChecks': '0', + 'conditions': [ + ['component=="shared_library"', { + 'RuntimeLibrary': '3', #/MDd + }, { + 'RuntimeLibrary': '1', #/MTd }], - ['v8_optimized_debug==2', { - 'cflags!': [ - '-O0', - '-O1', - '-Os', - ], - 'cflags': [ - '-fdata-sections', - '-ffunction-sections', - ], - 'defines': [ - 'OPTIMIZED_DEBUG' - ], - 'conditions': [ - # TODO(crbug.com/272548): Avoid -O3 in NaCl - ['nacl_target_arch=="none"', { - 'cflags': ['-O3'], - 'cflags!': ['-O2'], - }, { - 'cflags': ['-O2'], - 'cflags!': ['-O3'], - }], - ], + ['v8_target_arch=="x64"', { + # TODO(2207): remove this option once the bug is fixed. + 'WholeProgramOptimization': 'true', + }], + ], + }, + 'VCLinkerTool': { + 'LinkIncremental': '1', + 'OptimizeReferences': '2', + 'EnableCOMDATFolding': '2', + }, + }, + 'conditions': [ + ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd" or \ + OS=="qnx"', { + 'cflags!': [ + '-O0', + '-O1', + '-Os', + ], + 'cflags': [ + '-fdata-sections', + '-ffunction-sections', + ], + 'defines': [ + 'OPTIMIZED_DEBUG' + ], + 'conditions': [ + # TODO(crbug.com/272548): Avoid -O3 in NaCl + ['nacl_target_arch=="none"', { + 'cflags': ['-O3'], + 'cflags!': ['-O2'], + }, { + 'cflags': ['-O2'], + 'cflags!': ['-O3'], }], - ['v8_optimized_debug!=0 and gcc_version==44 and clang==0', { + ['gcc_version==44 and clang==0', { 'cflags': [ # Avoid crashes with gcc 4.4 in the v8 test suite. '-fno-tree-vrp', @@ -584,6 +651,29 @@ }], ], }], + ['OS=="mac"', { + 'xcode_settings': { + 'GCC_OPTIMIZATION_LEVEL': '3', # -O3 + 'GCC_STRICT_ALIASING': 'YES', + }, + }], + ], + }, # DebugBase2 + # Common settings for the Debug configuration. + 'DebugBaseCommon': { + 'abstract': 1, + 'defines': [ + 'ENABLE_DISASSEMBLER', + 'V8_ENABLE_CHECKS', + 'OBJECT_PRINT', + 'VERIFY_HEAP', + 'DEBUG' + ], + 'conditions': [ + ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd" or \ + OS=="qnx"', { + 'cflags': [ '-Woverloaded-virtual', '<(wno_array_bounds)', ], + }], ['OS=="linux" and v8_enable_backtrace==1', { # Support for backtrace_symbols. 'ldflags': [ '-rdynamic' ], @@ -602,17 +692,19 @@ }], ], }], - ['OS=="mac"', { - 'xcode_settings': { - 'conditions': [ - ['v8_optimized_debug==0', { - 'GCC_OPTIMIZATION_LEVEL': '0', # -O0 - }, { - 'GCC_OPTIMIZATION_LEVEL': '3', # -O3 - 'GCC_STRICT_ALIASING': 'YES', - }], - ], - }, + ], + }, # DebugBaseCommon + 'Debug': { + 'inherit_from': ['DebugBaseCommon'], + 'conditions': [ + ['v8_optimized_debug==0', { + 'inherit_from': ['DebugBase0'], + }], + ['v8_optimized_debug==1', { + 'inherit_from': ['DebugBase1'], + }], + ['v8_optimized_debug==2', { + 'inherit_from': ['DebugBase2'], }], ], }, # Debug diff --git a/deps/v8/include/v8-debug.h b/deps/v8/include/v8-debug.h index 1a86a061e..bd3eb77c4 100644 --- a/deps/v8/include/v8-debug.h +++ b/deps/v8/include/v8-debug.h @@ -1,29 +1,6 @@ // Copyright 2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_V8_DEBUG_H_ #define V8_V8_DEBUG_H_ @@ -190,32 +167,28 @@ class V8_EXPORT Debug { Handle<Value> data = Handle<Value>()); // Schedule a debugger break to happen when JavaScript code is run - // in the given isolate. If no isolate is provided the default - // isolate is used. - static void DebugBreak(Isolate* isolate = NULL); + // in the given isolate. + static void DebugBreak(Isolate* isolate); // Remove scheduled debugger break in given isolate if it has not - // happened yet. If no isolate is provided the default isolate is - // used. - static void CancelDebugBreak(Isolate* isolate = NULL); + // happened yet. + static void CancelDebugBreak(Isolate* isolate); // Break execution of JavaScript in the given isolate (this method // can be invoked from a non-VM thread) for further client command // execution on a VM thread. Client data is then passed in // EventDetails to EventCallback2 at the moment when the VM actually - // stops. If no isolate is provided the default isolate is used. - static void DebugBreakForCommand(ClientData* data = NULL, - Isolate* isolate = NULL); + // stops. + static void DebugBreakForCommand(Isolate* isolate, ClientData* data); + + // TODO(svenpanne) Remove this when Chrome is updated. + static void DebugBreakForCommand(ClientData* data, Isolate* isolate) { + DebugBreakForCommand(isolate, data); + } // Message based interface. The message protocol is JSON. static void SetMessageHandler2(MessageHandler2 handler); - // If no isolate is provided the default isolate is - // used. - // TODO(dcarney): remove - static void SendCommand(const uint16_t* command, int length, - ClientData* client_data = NULL, - Isolate* isolate = NULL); static void SendCommand(Isolate* isolate, const uint16_t* command, int length, ClientData* client_data = NULL); @@ -290,7 +263,7 @@ class V8_EXPORT Debug { * * Generally when message arrives V8 may be in one of 3 states: * 1. V8 is running script; V8 will automatically interrupt and process all - * pending messages (however auto_break flag should be enabled); + * pending messages; * 2. V8 is suspended on debug breakpoint; in this state V8 is dedicated * to reading and processing debug messages; * 3. 
V8 is not running at all or has called some long-working C++ function; @@ -331,7 +304,12 @@ class V8_EXPORT Debug { * (default Isolate if not provided). V8 will abort if LiveEdit is * unexpectedly used. LiveEdit is enabled by default. */ - static void SetLiveEditEnabled(bool enable, Isolate* isolate = NULL); + static void SetLiveEditEnabled(Isolate* isolate, bool enable); + + // TODO(svenpanne) Remove this when Chrome is updated. + static void SetLiveEditEnabled(bool enable, Isolate* isolate) { + SetLiveEditEnabled(isolate, enable); + } }; diff --git a/deps/v8/include/v8-platform.h b/deps/v8/include/v8-platform.h index 75fddd59a..5667211c3 100644 --- a/deps/v8/include/v8-platform.h +++ b/deps/v8/include/v8-platform.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_V8_PLATFORM_H_ #define V8_V8_PLATFORM_H_ diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h index 1691f2973..19d143e01 100644 --- a/deps/v8/include/v8-profiler.h +++ b/deps/v8/include/v8-profiler.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_V8_PROFILER_H_ #define V8_V8_PROFILER_H_ @@ -106,27 +83,35 @@ class V8_EXPORT CpuProfile { const CpuProfileNode* GetTopDownRoot() const; /** - * Returns number of samples recorded. The samples are not recorded unless - * |record_samples| parameter of CpuProfiler::StartCpuProfiling is true. - */ + * Returns number of samples recorded. The samples are not recorded unless + * |record_samples| parameter of CpuProfiler::StartCpuProfiling is true. + */ int GetSamplesCount() const; /** - * Returns profile node corresponding to the top frame the sample at - * the given index. - */ + * Returns profile node corresponding to the top frame the sample at + * the given index. + */ const CpuProfileNode* GetSample(int index) const; /** - * Returns time when the profile recording started (in microseconds - * since the Epoch). - */ + * Returns the timestamp of the sample. The timestamp is the number of + * microseconds since some unspecified starting point. + * The point is equal to the starting point used by GetStartTime. + */ + int64_t GetSampleTimestamp(int index) const; + + /** + * Returns time when the profile recording was started (in microseconds) + * since some unspecified starting point. + */ int64_t GetStartTime() const; /** - * Returns time when the profile recording was stopped (in microseconds - * since the Epoch). - */ + * Returns time when the profile recording was stopped (in microseconds) + * since some unspecified starting point. + * The point is equal to the starting point used by GetStartTime. + */ int64_t GetEndTime() const; /** @@ -164,7 +149,9 @@ class V8_EXPORT CpuProfiler { void StartProfiling(Handle<String> title, bool record_samples = false); /** Deprecated. Use StartProfiling instead. */ - void StartCpuProfiling(Handle<String> title, bool record_samples = false); + V8_DEPRECATED("Use StartProfiling", + void StartCpuProfiling(Handle<String> title, + bool record_samples = false)); /** * Stops collecting CPU profile with a given title and returns it. @@ -173,7 +160,8 @@ class V8_EXPORT CpuProfiler { CpuProfile* StopProfiling(Handle<String> title); /** Deprecated. Use StopProfiling instead. */ - const CpuProfile* StopCpuProfiling(Handle<String> title); + V8_DEPRECATED("Use StopProfiling", + const CpuProfile* StopCpuProfiling(Handle<String> title)); /** * Tells the profiler whether the embedder is idle. diff --git a/deps/v8/include/v8-testing.h b/deps/v8/include/v8-testing.h index ba4fcc44e..c827b6940 100644 --- a/deps/v8/include/v8-testing.h +++ b/deps/v8/include/v8-testing.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_V8_TEST_H_ #define V8_V8_TEST_H_ diff --git a/deps/v8/include/v8-util.h b/deps/v8/include/v8-util.h index 3f8cc6d26..60feff549 100644 --- a/deps/v8/include/v8-util.h +++ b/deps/v8/include/v8-util.h @@ -1,35 +1,13 @@ // Copyright 2014 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
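The v8-profiler.h hunk above deprecates CpuProfiler::StartCpuProfiling and StopCpuProfiling in favour of StartProfiling and StopProfiling, and adds CpuProfile::GetSampleTimestamp, whose values share the unspecified starting point used by GetStartTime and GetEndTime. A minimal sketch of the updated profiling flow, assuming `isolate` is a live v8::Isolate* with an entered context and that the workload to measure runs where the comment indicates:

```cpp
#include <v8.h>
#include <v8-profiler.h>

// Sketch only: collect a CPU profile with per-sample timestamps via the
// non-deprecated StartProfiling/StopProfiling entry points.
void ProfileOnce(v8::Isolate* isolate) {
  v8::HandleScope handle_scope(isolate);
  v8::CpuProfiler* profiler = isolate->GetCpuProfiler();
  v8::Local<v8::String> title = v8::String::NewFromUtf8(isolate, "startup");

  profiler->StartProfiling(title, /* record_samples */ true);
  // ... run the JavaScript workload to be measured ...
  v8::CpuProfile* profile = profiler->StopProfiling(title);

  for (int i = 0; i < profile->GetSamplesCount(); ++i) {
    // Offsets are relative to the same starting point as GetStartTime().
    int64_t offset_us =
        profile->GetSampleTimestamp(i) - profile->GetStartTime();
    (void)offset_us;  // e.g. bucket samples by time here
  }
  profile->Delete();
}
```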
#ifndef V8_UTIL_H_ #define V8_UTIL_H_ #include "v8.h" #include <map> +#include <vector> /** * Support for Persistent containers. @@ -42,6 +20,10 @@ namespace v8 { typedef uintptr_t PersistentContainerValue; static const uintptr_t kPersistentContainerNotFound = 0; +enum PersistentContainerCallbackType { + kNotWeak, + kWeak +}; /** @@ -92,38 +74,34 @@ class StdMapTraits { /** * A default trait implementation for PersistentValueMap, which inherits * a std:map backing map from StdMapTraits and holds non-weak persistent - * objects. + * objects and has no special Dispose handling. * - * Users have to implement their own dispose trait. + * You should not derive from this class, since MapType depends on the + * surrounding class, and hence a subclass cannot simply inherit the methods. */ template<typename K, typename V> -class StrongMapTraits : public StdMapTraits<K, V> { +class DefaultPersistentValueMapTraits : public StdMapTraits<K, V> { public: // Weak callback & friends: - static const bool kIsWeak = false; - typedef typename StdMapTraits<K, V>::Impl Impl; + static const PersistentContainerCallbackType kCallbackType = kNotWeak; + typedef PersistentValueMap<K, V, DefaultPersistentValueMapTraits<K, V> > + MapType; typedef void WeakCallbackDataType; + static WeakCallbackDataType* WeakCallbackParameter( - Impl* impl, const K& key, Local<V> value); - static Impl* ImplFromWeakCallbackData( - const WeakCallbackData<V, WeakCallbackDataType>& data); + MapType* map, const K& key, Local<V> value) { + return NULL; + } + static MapType* MapFromWeakCallbackData( + const WeakCallbackData<V, WeakCallbackDataType>& data) { + return NULL; + } static K KeyFromWeakCallbackData( - const WeakCallbackData<V, WeakCallbackDataType>& data); - static void DisposeCallbackData(WeakCallbackDataType* data); -}; - - -/** - * A default trait implementation for PersistentValueMap, with a std::map - * backing map, non-weak persistents as values, and no special dispose - * handling. Can be used as-is. - */ -template<typename K, typename V> -class DefaultPersistentValueMapTraits : public StrongMapTraits<K, V> { - public: - typedef typename StrongMapTraits<K, V>::Impl Impl; - static void Dispose(Isolate* isolate, UniquePersistent<V> value, - Impl* impl, K key) { } + const WeakCallbackData<V, WeakCallbackDataType>& data) { + return K(); + } + static void DisposeCallbackData(WeakCallbackDataType* data) { } + static void Dispose(Isolate* isolate, UniquePersistent<V> value, K key) { } }; @@ -140,55 +118,49 @@ class DefaultPersistentValueMapTraits : public StrongMapTraits<K, V> { template<typename K, typename V, typename Traits> class PersistentValueMap { public: - V8_INLINE explicit PersistentValueMap(Isolate* isolate) : isolate_(isolate) {} + explicit PersistentValueMap(Isolate* isolate) : isolate_(isolate) {} - V8_INLINE ~PersistentValueMap() { Clear(); } + ~PersistentValueMap() { Clear(); } - V8_INLINE Isolate* GetIsolate() { return isolate_; } + Isolate* GetIsolate() { return isolate_; } /** * Return size of the map. */ - V8_INLINE size_t Size() { return Traits::Size(&impl_); } + size_t Size() { return Traits::Size(&impl_); } /** * Return whether the map holds weak persistents. */ - V8_INLINE bool IsWeak() { return Traits::kIsWeak; } + bool IsWeak() { return Traits::kCallbackType != kNotWeak; } /** * Get value stored in map. */ - V8_INLINE Local<V> Get(const K& key) { + Local<V> Get(const K& key) { return Local<V>::New(isolate_, FromVal(Traits::Get(&impl_, key))); } /** * Check whether a value is contained in the map. 
*/ - V8_INLINE bool Contains(const K& key) { - return Traits::Get(&impl_, key) != 0; + bool Contains(const K& key) { + return Traits::Get(&impl_, key) != kPersistentContainerNotFound; } /** * Get value stored in map and set it in returnValue. * Return true if a value was found. */ - V8_INLINE bool SetReturnValue(const K& key, - ReturnValue<Value>& returnValue) { - PersistentContainerValue value = Traits::Get(&impl_, key); - bool hasValue = value != 0; - if (hasValue) { - returnValue.SetInternal( - *reinterpret_cast<internal::Object**>(FromVal(value))); - } - return hasValue; + bool SetReturnValue(const K& key, + ReturnValue<Value> returnValue) { + return SetReturnValueFromVal(returnValue, Traits::Get(&impl_, key)); } /** * Call Isolate::SetReference with the given parent and the map value. */ - V8_INLINE void SetReference(const K& key, + void SetReference(const K& key, const Persistent<Object>& parent) { GetIsolate()->SetReference( reinterpret_cast<internal::Object**>(parent.val_), @@ -215,7 +187,7 @@ class PersistentValueMap { /** * Return value for key and remove it from the map. */ - V8_INLINE UniquePersistent<V> Remove(const K& key) { + UniquePersistent<V> Remove(const K& key) { return Release(Traits::Remove(&impl_, key)).Pass(); } @@ -231,12 +203,76 @@ class PersistentValueMap { typename Traits::Impl impl; Traits::Swap(impl_, impl); for (It i = Traits::Begin(&impl); i != Traits::End(&impl); ++i) { - Traits::Dispose(isolate_, Release(Traits::Value(i)).Pass(), &impl, - Traits::Key(i)); + Traits::Dispose(isolate_, Release(Traits::Value(i)).Pass(), + Traits::Key(i)); } } } + /** + * Helper class for GetReference/SetWithReference. Do not use outside + * that context. + */ + class PersistentValueReference { + public: + PersistentValueReference() : value_(kPersistentContainerNotFound) { } + PersistentValueReference(const PersistentValueReference& other) + : value_(other.value_) { } + + Local<V> NewLocal(Isolate* isolate) const { + return Local<V>::New(isolate, FromVal(value_)); + } + bool IsEmpty() const { + return value_ == kPersistentContainerNotFound; + } + template<typename T> + bool SetReturnValue(ReturnValue<T> returnValue) { + return SetReturnValueFromVal(returnValue, value_); + } + void Reset() { + value_ = kPersistentContainerNotFound; + } + void operator=(const PersistentValueReference& other) { + value_ = other.value_; + } + + private: + friend class PersistentValueMap; + + explicit PersistentValueReference(PersistentContainerValue value) + : value_(value) { } + + void operator=(PersistentContainerValue value) { + value_ = value; + } + + PersistentContainerValue value_; + }; + + /** + * Get a reference to a map value. This enables fast, repeated access + * to a value stored in the map while the map remains unchanged. + * + * Careful: This is potentially unsafe, so please use with care. + * The value will become invalid if the value for this key changes + * in the underlying map, as a result of Set or Remove for the same + * key; as a result of the weak callback for the same key; or as a + * result of calling Clear() or destruction of the map. + */ + PersistentValueReference GetReference(const K& key) { + return PersistentValueReference(Traits::Get(&impl_, key)); + } + + /** + * Put a value into the map and update the reference. + * Restrictions of GetReference apply here as well. 
+ */ + UniquePersistent<V> Set(const K& key, UniquePersistent<V> value, + PersistentValueReference* reference) { + *reference = Leak(&value); + return SetUnique(key, &value); + } + private: PersistentValueMap(PersistentValueMap&); void operator=(PersistentValueMap&); @@ -246,10 +282,10 @@ class PersistentValueMap { * by the Traits class. */ UniquePersistent<V> SetUnique(const K& key, UniquePersistent<V>* persistent) { - if (Traits::kIsWeak) { + if (Traits::kCallbackType != kNotWeak) { Local<V> value(Local<V>::New(isolate_, *persistent)); persistent->template SetWeak<typename Traits::WeakCallbackDataType>( - Traits::WeakCallbackParameter(&impl_, key, value), WeakCallback); + Traits::WeakCallbackParameter(this, key, value), WeakCallback); } PersistentContainerValue old_value = Traits::Set(&impl_, key, ClearAndLeak(persistent)); @@ -258,34 +294,50 @@ class PersistentValueMap { static void WeakCallback( const WeakCallbackData<V, typename Traits::WeakCallbackDataType>& data) { - if (Traits::kIsWeak) { - typename Traits::Impl* impl = Traits::ImplFromWeakCallbackData(data); + if (Traits::kCallbackType != kNotWeak) { + PersistentValueMap<K, V, Traits>* persistentValueMap = + Traits::MapFromWeakCallbackData(data); K key = Traits::KeyFromWeakCallbackData(data); - PersistentContainerValue value = Traits::Remove(impl, key); - Traits::Dispose(data.GetIsolate(), Release(value).Pass(), impl, key); + Traits::Dispose(data.GetIsolate(), + persistentValueMap->Remove(key).Pass(), key); } } - V8_INLINE static V* FromVal(PersistentContainerValue v) { + static V* FromVal(PersistentContainerValue v) { return reinterpret_cast<V*>(v); } - V8_INLINE static PersistentContainerValue ClearAndLeak( + static bool SetReturnValueFromVal( + ReturnValue<Value>& returnValue, PersistentContainerValue value) { + bool hasValue = value != kPersistentContainerNotFound; + if (hasValue) { + returnValue.SetInternal( + *reinterpret_cast<internal::Object**>(FromVal(value))); + } + return hasValue; + } + + static PersistentContainerValue ClearAndLeak( UniquePersistent<V>* persistent) { V* v = persistent->val_; persistent->val_ = 0; return reinterpret_cast<PersistentContainerValue>(v); } + static PersistentContainerValue Leak( + UniquePersistent<V>* persistent) { + return reinterpret_cast<PersistentContainerValue>(persistent->val_); + } + /** * Return a container value as UniquePersistent and make sure the weak * callback is properly disposed of. All remove functionality should go * through this. */ - V8_INLINE static UniquePersistent<V> Release(PersistentContainerValue v) { + static UniquePersistent<V> Release(PersistentContainerValue v) { UniquePersistent<V> p; p.val_ = FromVal(v); - if (Traits::kIsWeak && !p.IsEmpty()) { + if (Traits::kCallbackType != kNotWeak && !p.IsEmpty()) { Traits::DisposeCallbackData( p.template ClearWeak<typename Traits::WeakCallbackDataType>()); } @@ -313,42 +365,121 @@ class StdPersistentValueMap : public PersistentValueMap<K, V, Traits> { }; +class DefaultPersistentValueVectorTraits { + public: + typedef std::vector<PersistentContainerValue> Impl; + + static void Append(Impl* impl, PersistentContainerValue value) { + impl->push_back(value); + } + static bool IsEmpty(const Impl* impl) { + return impl->empty(); + } + static size_t Size(const Impl* impl) { + return impl->size(); + } + static PersistentContainerValue Get(const Impl* impl, size_t i) { + return (i < impl->size()) ? 
impl->at(i) : kPersistentContainerNotFound; + } + static void ReserveCapacity(Impl* impl, size_t capacity) { + impl->reserve(capacity); + } + static void Clear(Impl* impl) { + impl->clear(); + } +}; + + /** - * Empty default implementations for StrongTraits methods. - * - * These should not be necessary, since they're only used in code that - * is surrounded by if(Traits::kIsWeak), which for StrongMapTraits is - * compile-time false. Most compilers can live without them; however - * the compiler we use from 64-bit Win differs. + * A vector wrapper that safely stores UniquePersistent values. + * C++11 embedders don't need this class, as they can use UniquePersistent + * directly in std containers. * - * TODO(vogelheim): Remove these once they're no longer necessary. + * This class relies on a backing vector implementation, whose type and methods + * are described by the Traits class. The backing map will handle values of type + * PersistentContainerValue, with all conversion into and out of V8 + * handles being transparently handled by this class. */ -template<typename K, typename V> -typename StrongMapTraits<K, V>::WeakCallbackDataType* - StrongMapTraits<K, V>::WeakCallbackParameter( - Impl* impl, const K& key, Local<V> value) { - return NULL; -} +template<typename V, typename Traits = DefaultPersistentValueVectorTraits> +class PersistentValueVector { + public: + explicit PersistentValueVector(Isolate* isolate) : isolate_(isolate) { } + ~PersistentValueVector() { + Clear(); + } -template<typename K, typename V> -typename StrongMapTraits<K, V>::Impl* - StrongMapTraits<K, V>::ImplFromWeakCallbackData( - const WeakCallbackData<V, WeakCallbackDataType>& data) { - return NULL; -} + /** + * Append a value to the vector. + */ + void Append(Local<V> value) { + UniquePersistent<V> persistent(isolate_, value); + Traits::Append(&impl_, ClearAndLeak(&persistent)); + } + /** + * Append a persistent's value to the vector. + */ + void Append(UniquePersistent<V> persistent) { + Traits::Append(&impl_, ClearAndLeak(&persistent)); + }; -template<typename K, typename V> -K StrongMapTraits<K, V>::KeyFromWeakCallbackData( - const WeakCallbackData<V, WeakCallbackDataType>& data) { - return K(); -} + /** + * Are there any values in the vector? + */ + bool IsEmpty() const { + return Traits::IsEmpty(&impl_); + } + /** + * How many elements are in the vector? + */ + size_t Size() const { + return Traits::Size(&impl_); + } -template<typename K, typename V> -void StrongMapTraits<K, V>::DisposeCallbackData(WeakCallbackDataType* data) { -} + /** + * Retrieve the i-th value in the vector. + */ + Local<V> Get(size_t index) const { + return Local<V>::New(isolate_, FromVal(Traits::Get(&impl_, index))); + } + + /** + * Remove all elements from the vector. + */ + void Clear() { + size_t length = Traits::Size(&impl_); + for (size_t i = 0; i < length; i++) { + UniquePersistent<V> p; + p.val_ = FromVal(Traits::Get(&impl_, i)); + } + Traits::Clear(&impl_); + } + + /** + * Reserve capacity in the vector. + * (Efficiency gains depend on the backing implementation.) 
+ */ + void ReserveCapacity(size_t capacity) { + Traits::ReserveCapacity(&impl_, capacity); + } + + private: + static PersistentContainerValue ClearAndLeak( + UniquePersistent<V>* persistent) { + V* v = persistent->val_; + persistent->val_ = 0; + return reinterpret_cast<PersistentContainerValue>(v); + } + + static V* FromVal(PersistentContainerValue v) { + return reinterpret_cast<V*>(v); + } + + Isolate* isolate_; + typename Traits::Impl impl_; +}; } // namespace v8 diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h index 608e3c52c..d39dca96b 100644 --- a/deps/v8/include/v8.h +++ b/deps/v8/include/v8.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. /** \mainpage V8 API Reference Guide * @@ -129,6 +106,7 @@ template<class T, class M = NonCopyablePersistentTraits<T> > class Persistent; template<class T> class UniquePersistent; template<class K, class V, class T> class PersistentValueMap; +template<class V, class T> class PersistentValueVector; template<class T, class P> class WeakCallbackObject; class FunctionTemplate; class ObjectTemplate; @@ -315,15 +293,6 @@ template <class T> class Handle { return New(isolate, that.val_); } -#ifndef V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR - - private: -#endif - /** - * Creates a new handle for the specified value. - */ - V8_INLINE explicit Handle(T* val) : val_(val) {} - private: friend class Utils; template<class F, class M> friend class Persistent; @@ -342,6 +311,11 @@ template <class T> class Handle { friend class Object; friend class Private; + /** + * Creates a new handle for the specified value. 
+ */ + V8_INLINE explicit Handle(T* val) : val_(val) {} + V8_INLINE static Handle<T> New(Isolate* isolate, T* that); T* val_; @@ -395,12 +369,6 @@ template <class T> class Local : public Handle<T> { V8_INLINE static Local<T> New(Isolate* isolate, const PersistentBase<T>& that); -#ifndef V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR - - private: -#endif - template <class S> V8_INLINE Local(S* that) : Handle<T>(that) { } - private: friend class Utils; template<class F> friend class Eternal; @@ -417,7 +385,9 @@ template <class T> class Local : public Handle<T> { friend class HandleScope; friend class EscapableHandleScope; template<class F1, class F2, class F3> friend class PersistentValueMap; + template<class F1, class F2> friend class PersistentValueVector; + template <class S> V8_INLINE Local(S* that) : Handle<T>(that) { } V8_INLINE static Local<T> New(Isolate* isolate, T* that); }; @@ -522,6 +492,13 @@ template <class T> class PersistentBase { return !operator==(that); } + /** + * Install a finalization callback on this object. + * NOTE: There is no guarantee as to *when* or even *if* the callback is + * invoked. The invocation is performed solely on a best effort basis. + * As always, GC-based finalization should *not* be relied upon for any + * critical form of resource management! + */ template<typename P> V8_INLINE void SetWeak( P* parameter, @@ -586,6 +563,7 @@ template <class T> class PersistentBase { template<class F> friend class PersistentBase; template<class F> friend class ReturnValue; template<class F1, class F2, class F3> friend class PersistentValueMap; + template<class F1, class F2> friend class PersistentValueVector; friend class Object; explicit V8_INLINE PersistentBase(T* val) : val_(val) {} @@ -719,15 +697,6 @@ template <class T, class M> class Persistent : public PersistentBase<T> { // This will be removed. V8_INLINE T* ClearAndLeak(); - // TODO(dcarney): remove -#ifndef V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR - - private: -#endif - template <class S> V8_INLINE Persistent(S* that) : PersistentBase<T>(that) { } - - V8_INLINE T* operator*() const { return this->val_; } - private: friend class Isolate; friend class Utils; @@ -736,6 +705,8 @@ template <class T, class M> class Persistent : public PersistentBase<T> { template<class F1, class F2> friend class Persistent; template<class F> friend class ReturnValue; + template <class S> V8_INLINE Persistent(S* that) : PersistentBase<T>(that) { } + V8_INLINE T* operator*() const { return this->val_; } template<class S, class M2> V8_INLINE void Copy(const Persistent<S, M2>& that); }; @@ -804,7 +775,7 @@ class UniquePersistent : public PersistentBase<T> { /** * Pass allows returning uniques from functions, etc. */ - V8_INLINE UniquePersistent Pass() { return UniquePersistent(RValue(this)); } + UniquePersistent Pass() { return UniquePersistent(RValue(this)); } private: UniquePersistent(UniquePersistent&); @@ -937,53 +908,6 @@ class V8_EXPORT Data { /** - * Pre-compilation data that can be associated with a script. This - * data can be calculated for a script in advance of actually - * compiling it, and can be stored between compilations. When script - * data is given to the compile method compilation will be faster. - */ -class V8_EXPORT ScriptData { // NOLINT - public: - virtual ~ScriptData() { } - - /** - * Pre-compiles the specified script (context-independent). - * - * NOTE: Pre-compilation using this method cannot happen on another thread - * without using Lockers. - * - * \param source Script source code. 
- */ - static ScriptData* PreCompile(Handle<String> source); - - /** - * Load previous pre-compilation data. - * - * \param data Pointer to data returned by a call to Data() of a previous - * ScriptData. Ownership is not transferred. - * \param length Length of data. - */ - static ScriptData* New(const char* data, int length); - - /** - * Returns the length of Data(). - */ - virtual int Length() = 0; - - /** - * Returns a serialized representation of this ScriptData that can later be - * passed to New(). NOTE: Serialized data is platform-dependent. - */ - virtual const char* Data() = 0; - - /** - * Returns true if the source code could not be parsed. - */ - virtual bool HasError() = 0; -}; - - -/** * The origin, within a file, of a script. */ class ScriptOrigin { @@ -1040,12 +964,9 @@ class V8_EXPORT Script { public: /** * A shorthand for ScriptCompiler::Compile(). - * The ScriptData parameter will be deprecated; use ScriptCompiler::Compile if - * you want to pass it. */ static Local<Script> Compile(Handle<String> source, - ScriptOrigin* origin = NULL, - ScriptData* script_data = NULL); + ScriptOrigin* origin = NULL); // To be decprecated, use the Compile above. static Local<Script> Compile(Handle<String> source, @@ -1210,12 +1131,6 @@ class V8_EXPORT Message { Handle<Value> GetScriptResourceName() const; /** - * Returns the resource data for the script from where the function causing - * the error originates. - */ - Handle<Value> GetScriptData() const; - - /** * Exception stack trace. By default stack traces are not captured for * uncaught exceptions. SetCaptureStackTraceForUncaughtExceptions allows * to change this option. @@ -2578,7 +2493,7 @@ class PropertyCallbackInfo { public: V8_INLINE Isolate* GetIsolate() const; V8_INLINE Local<Value> Data() const; - V8_INLINE Local<Object> This() const; + V8_INLINE Local<Value> This() const; V8_INLINE Local<Object> Holder() const; V8_INLINE ReturnValue<T> GetReturnValue() const; // This shouldn't be public, but the arm compiler needs it. @@ -3941,14 +3856,17 @@ class V8_EXPORT ResourceConstraints { * * \param physical_memory The total amount of physical memory on the current * device, in bytes. + * \param virtual_memory_limit The amount of virtual memory on the current + * device, in bytes, or zero, if there is no limit. * \param number_of_processors The number of CPUs available on the current * device. */ void ConfigureDefaults(uint64_t physical_memory, + uint64_t virtual_memory_limit, uint32_t number_of_processors); - int max_young_space_size() const { return max_young_space_size_; } - void set_max_young_space_size(int value) { max_young_space_size_ = value; } + int max_new_space_size() const { return max_new_space_size_; } + void set_max_new_space_size(int value) { max_new_space_size_ = value; } int max_old_space_size() const { return max_old_space_size_; } void set_max_old_space_size(int value) { max_old_space_size_ = value; } int max_executable_size() const { return max_executable_size_; } @@ -3961,13 +3879,18 @@ class V8_EXPORT ResourceConstraints { void set_max_available_threads(int value) { max_available_threads_ = value; } + int code_range_size() const { return code_range_size_; } + void set_code_range_size(int value) { + code_range_size_ = value; + } private: - int max_young_space_size_; + int max_new_space_size_; int max_old_space_size_; int max_executable_size_; uint32_t* stack_limit_; int max_available_threads_; + int code_range_size_; }; @@ -4147,7 +4070,7 @@ class V8_EXPORT Isolate { /** * Assert that no Javascript code is invoked. 
*/ - class DisallowJavascriptExecutionScope { + class V8_EXPORT DisallowJavascriptExecutionScope { public: enum OnFailure { CRASH_ON_FAILURE, THROW_ON_FAILURE }; @@ -4168,7 +4091,7 @@ class V8_EXPORT Isolate { /** * Introduce exception to DisallowJavascriptExecutionScope. */ - class AllowJavascriptExecutionScope { + class V8_EXPORT AllowJavascriptExecutionScope { public: explicit AllowJavascriptExecutionScope(Isolate* isolate); ~AllowJavascriptExecutionScope(); @@ -4184,6 +4107,24 @@ class V8_EXPORT Isolate { }; /** + * Do not run microtasks while this scope is active, even if microtasks are + * automatically executed otherwise. + */ + class V8_EXPORT SuppressMicrotaskExecutionScope { + public: + explicit SuppressMicrotaskExecutionScope(Isolate* isolate); + ~SuppressMicrotaskExecutionScope(); + + private: + internal::Isolate* isolate_; + + // Prevent copying of Scope objects. + SuppressMicrotaskExecutionScope(const SuppressMicrotaskExecutionScope&); + SuppressMicrotaskExecutionScope& operator=( + const SuppressMicrotaskExecutionScope&); + }; + + /** * Types of garbage collections that can be requested via * RequestGarbageCollectionForTesting. */ @@ -4418,6 +4359,36 @@ class V8_EXPORT Isolate { */ void SetEventLogger(LogEventCallback that); + /** + * Adds a callback to notify the host application when a script finished + * running. If a script re-enters the runtime during executing, the + * CallCompletedCallback is only invoked when the outer-most script + * execution ends. Executing scripts inside the callback do not trigger + * further callbacks. + */ + void AddCallCompletedCallback(CallCompletedCallback callback); + + /** + * Removes callback that was installed by AddCallCompletedCallback. + */ + void RemoveCallCompletedCallback(CallCompletedCallback callback); + + /** + * Experimental: Runs the Microtask Work Queue until empty + */ + void RunMicrotasks(); + + /** + * Experimental: Enqueues the callback to the Microtask Work Queue + */ + void EnqueueMicrotask(Handle<Function> microtask); + + /** + * Experimental: Controls whether the Microtask Work Queue is automatically + * run when the script call depth decrements to zero. + */ + void SetAutorunMicrotasks(bool autorun); + private: template<class K, class V, class Traits> friend class PersistentValueMap; @@ -4780,32 +4751,24 @@ class V8_EXPORT V8 { static void RemoveMemoryAllocationCallback(MemoryAllocationCallback callback); /** - * Adds a callback to notify the host application when a script finished - * running. If a script re-enters the runtime during executing, the - * CallCompletedCallback is only invoked when the outer-most script - * execution ends. Executing scripts inside the callback do not trigger - * further callbacks. - */ - static void AddCallCompletedCallback(CallCompletedCallback callback); - - /** - * Removes callback that was installed by AddCallCompletedCallback. - */ - static void RemoveCallCompletedCallback(CallCompletedCallback callback); - - /** * Experimental: Runs the Microtask Work Queue until empty + * + * Deprecated: Use methods on Isolate instead. */ static void RunMicrotasks(Isolate* isolate); /** * Experimental: Enqueues the callback to the Microtask Work Queue + * + * Deprecated: Use methods on Isolate instead. */ static void EnqueueMicrotask(Isolate* isolate, Handle<Function> microtask); /** * Experimental: Controls whether the Microtask Work Queue is automatically * run when the script call depth decrements to zero. + * + * Deprecated: Use methods on Isolate instead. 
*/ static void SetAutorunMicrotasks(Isolate *source, bool autorun); @@ -4870,15 +4833,14 @@ class V8_EXPORT V8 { /** * Forcefully terminate the current thread of JavaScript execution - * in the given isolate. If no isolate is provided, the default - * isolate is used. + * in the given isolate. * * This method can be used by any thread even if that thread has not * acquired the V8 lock with a Locker object. * * \param isolate The isolate in which to terminate the current JS execution. */ - static void TerminateExecution(Isolate* isolate = NULL); + static void TerminateExecution(Isolate* isolate); /** * Is V8 terminating JavaScript execution. @@ -5246,8 +5208,13 @@ class V8_EXPORT Context { */ void Exit(); - /** Returns true if the context has experienced an out of memory situation. */ - bool HasOutOfMemoryException() { return false; } + /** + * Returns true if the context has experienced an out of memory situation. + * Since V8 always treats OOM as fatal error, this can no longer return true. + * Therefore this is now deprecated. + * */ + V8_DEPRECATED("This can no longer happen. OOM is a fatal error.", + bool HasOutOfMemoryException()) { return false; } /** Returns an isolate associated with a current context. */ v8::Isolate* GetIsolate(); @@ -5559,7 +5526,7 @@ class Internals { static const int kJSObjectHeaderSize = 3 * kApiPointerSize; static const int kFixedArrayHeaderSize = 2 * kApiPointerSize; static const int kContextHeaderSize = 2 * kApiPointerSize; - static const int kContextEmbedderDataIndex = 65; + static const int kContextEmbedderDataIndex = 74; static const int kFullStringRepresentationMask = 0x07; static const int kStringEncodingMask = 0x4; static const int kExternalTwoByteRepresentationTag = 0x02; @@ -5571,7 +5538,7 @@ class Internals { static const int kNullValueRootIndex = 7; static const int kTrueValueRootIndex = 8; static const int kFalseValueRootIndex = 9; - static const int kEmptyStringRootIndex = 154; + static const int kEmptyStringRootIndex = 162; static const int kNodeClassIdOffset = 1 * kApiPointerSize; static const int kNodeFlagsOffset = 1 * kApiPointerSize + 3; @@ -6521,8 +6488,8 @@ Local<Value> PropertyCallbackInfo<T>::Data() const { template<typename T> -Local<Object> PropertyCallbackInfo<T>::This() const { - return Local<Object>(reinterpret_cast<Object*>(&args_[kThisIndex])); +Local<Value> PropertyCallbackInfo<T>::This() const { + return Local<Value>(reinterpret_cast<Value*>(&args_[kThisIndex])); } diff --git a/deps/v8/include/v8config.h b/deps/v8/include/v8config.h index 0d99e5658..452ffc73e 100644 --- a/deps/v8/include/v8config.h +++ b/deps/v8/include/v8config.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8CONFIG_H_ #define V8CONFIG_H_ diff --git a/deps/v8/include/v8stdint.h b/deps/v8/include/v8stdint.h index 0b49b3791..9a935ddde 100644 --- a/deps/v8/include/v8stdint.h +++ b/deps/v8/include/v8stdint.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Load definitions of standard types. 
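The v8-util.h and v8.h hunks above add PersistentValueVector, a container that owns UniquePersistent handles for embedders that cannot yet place UniquePersistent directly into C++11 standard containers, and move the microtask entry points from static V8 methods onto Isolate. A minimal usage sketch; the helper names are illustrative, and `isolate` is assumed to be a live v8::Isolate* whose context produced the objects and function being passed in (the PersistentValueVector itself would have been constructed with that isolate elsewhere):

```cpp
#include <v8.h>
#include <v8-util.h>
#include <vector>

// Sketch only: keep a batch of objects alive beyond the current HandleScope
// using the PersistentValueVector container introduced in this upgrade.
void RetainObjects(const std::vector<v8::Local<v8::Object> >& objects,
                   v8::PersistentValueVector<v8::Object>* cache) {
  cache->ReserveCapacity(objects.size());
  for (size_t i = 0; i < objects.size(); ++i) {
    cache->Append(objects[i]);  // stored internally as a UniquePersistent
  }
}

// Sketch only: the microtask queue is now driven through Isolate methods
// instead of the deprecated static V8::RunMicrotasks and friends.
void DrainMicrotasks(v8::Isolate* isolate, v8::Handle<v8::Function> microtask) {
  isolate->SetAutorunMicrotasks(false);  // take manual control of the queue
  isolate->EnqueueMicrotask(microtask);
  isolate->RunMicrotasks();              // run queued callbacks right away
}
```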
diff --git a/deps/v8/samples/lineprocessor.cc b/deps/v8/samples/lineprocessor.cc index a8ad0318d..f259ea4e9 100644 --- a/deps/v8/samples/lineprocessor.cc +++ b/deps/v8/samples/lineprocessor.cc @@ -27,9 +27,7 @@ #include <v8.h> -#ifdef ENABLE_DEBUGGER_SUPPORT #include <v8-debug.h> -#endif // ENABLE_DEBUGGER_SUPPORT #include <fcntl.h> #include <string.h> @@ -109,7 +107,6 @@ bool RunCppCycle(v8::Handle<v8::Script> script, bool report_exceptions); -#ifdef ENABLE_DEBUGGER_SUPPORT v8::Persistent<v8::Context> debug_message_context; void DispatchDebugMessages() { @@ -132,7 +129,6 @@ void DispatchDebugMessages() { v8::Debug::ProcessDebugMessages(); } -#endif // ENABLE_DEBUGGER_SUPPORT int RunMain(int argc, char* argv[]) { @@ -144,11 +140,9 @@ int RunMain(int argc, char* argv[]) { v8::Handle<v8::Value> script_name; int script_param_counter = 0; -#ifdef ENABLE_DEBUGGER_SUPPORT int port_number = -1; bool wait_for_connection = false; bool support_callback = false; -#endif // ENABLE_DEBUGGER_SUPPORT MainCycleType cycle_type = CycleInCpp; @@ -162,7 +156,6 @@ int RunMain(int argc, char* argv[]) { cycle_type = CycleInCpp; } else if (strcmp(str, "--main-cycle-in-js") == 0) { cycle_type = CycleInJs; -#ifdef ENABLE_DEBUGGER_SUPPORT } else if (strcmp(str, "--callback") == 0) { support_callback = true; } else if (strcmp(str, "--wait-for-connection") == 0) { @@ -170,7 +163,6 @@ int RunMain(int argc, char* argv[]) { } else if (strcmp(str, "-p") == 0 && i + 1 < argc) { port_number = atoi(argv[i + 1]); // NOLINT i++; -#endif // ENABLE_DEBUGGER_SUPPORT } else if (strncmp(str, "--", 2) == 0) { printf("Warning: unknown flag %s.\nTry --help for options\n", str); } else if (strcmp(str, "-e") == 0 && i + 1 < argc) { @@ -218,7 +210,6 @@ int RunMain(int argc, char* argv[]) { // Enter the newly created execution environment. v8::Context::Scope context_scope(context); -#ifdef ENABLE_DEBUGGER_SUPPORT debug_message_context.Reset(isolate, context); v8::Locker locker(isolate); @@ -230,7 +221,6 @@ int RunMain(int argc, char* argv[]) { if (port_number != -1) { v8::Debug::EnableAgent("lineprocessor", port_number, wait_for_connection); } -#endif // ENABLE_DEBUGGER_SUPPORT bool report_exceptions = true; @@ -275,9 +265,7 @@ bool RunCppCycle(v8::Handle<v8::Script> script, v8::Local<v8::Context> context, bool report_exceptions) { v8::Isolate* isolate = context->GetIsolate(); -#ifdef ENABLE_DEBUGGER_SUPPORT v8::Locker lock(isolate); -#endif // ENABLE_DEBUGGER_SUPPORT v8::Handle<v8::String> fun_name = v8::String::NewFromUtf8(isolate, "ProcessLine"); @@ -435,9 +423,7 @@ v8::Handle<v8::String> ReadLine() { char* res; { -#ifdef ENABLE_DEBUGGER_SUPPORT v8::Unlocker unlocker(v8::Isolate::GetCurrent()); -#endif // ENABLE_DEBUGGER_SUPPORT res = fgets(buffer, kBufferSize, stdin); } v8::Isolate* isolate = v8::Isolate::GetCurrent(); diff --git a/deps/v8/src/accessors.cc b/deps/v8/src/accessors.cc index 35cff1af7..8c8fcdd99 100644 --- a/deps/v8/src/accessors.cc +++ b/deps/v8/src/accessors.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" #include "accessors.h" @@ -37,11 +14,33 @@ #include "isolate.h" #include "list-inl.h" #include "property-details.h" +#include "api.h" namespace v8 { namespace internal { +Handle<AccessorInfo> Accessors::MakeAccessor( + Isolate* isolate, + Handle<String> name, + AccessorGetterCallback getter, + AccessorSetterCallback setter, + PropertyAttributes attributes) { + Factory* factory = isolate->factory(); + Handle<ExecutableAccessorInfo> info = factory->NewExecutableAccessorInfo(); + info->set_property_attributes(attributes); + info->set_all_can_read(false); + info->set_all_can_write(false); + info->set_prohibits_overwriting(false); + info->set_name(*name); + Handle<Object> get = v8::FromCData(isolate, getter); + Handle<Object> set = v8::FromCData(isolate, setter); + info->set_getter(*get); + info->set_setter(*set); + return info; +} + + template <class C> static C* FindInstanceOf(Isolate* isolate, Object* obj) { for (Object* cur = obj; !cur->IsNull(); cur = cur->GetPrototype(isolate)) { @@ -51,39 +50,11 @@ static C* FindInstanceOf(Isolate* isolate, Object* obj) { } -// Entry point that never should be called. -MaybeObject* Accessors::IllegalSetter(Isolate* isolate, - JSObject*, - Object*, - void*) { - UNREACHABLE(); - return NULL; -} - - -Object* Accessors::IllegalGetAccessor(Isolate* isolate, - Object* object, - void*) { - UNREACHABLE(); - return object; -} - - -MaybeObject* Accessors::ReadOnlySetAccessor(Isolate* isolate, - JSObject*, - Object* value, - void*) { - // According to ECMA-262, section 8.6.2.2, page 28, setting - // read-only properties must be silently ignored. 
- return value; -} - - static V8_INLINE bool CheckForName(Handle<String> name, - String* property_name, + Handle<String> property_name, int offset, int* object_offset) { - if (name->Equals(property_name)) { + if (String::Equals(name, property_name)) { *object_offset = offset; return true; } @@ -100,35 +71,35 @@ bool Accessors::IsJSObjectFieldAccessor(typename T::TypeHandle type, Isolate* isolate = name->GetIsolate(); if (type->Is(T::String())) { - return CheckForName(name, isolate->heap()->length_string(), + return CheckForName(name, isolate->factory()->length_string(), String::kLengthOffset, object_offset); } if (!type->IsClass()) return false; - Handle<Map> map = type->AsClass(); + Handle<Map> map = type->AsClass()->Map(); switch (map->instance_type()) { case JS_ARRAY_TYPE: return - CheckForName(name, isolate->heap()->length_string(), + CheckForName(name, isolate->factory()->length_string(), JSArray::kLengthOffset, object_offset); case JS_TYPED_ARRAY_TYPE: return - CheckForName(name, isolate->heap()->length_string(), + CheckForName(name, isolate->factory()->length_string(), JSTypedArray::kLengthOffset, object_offset) || - CheckForName(name, isolate->heap()->byte_length_string(), + CheckForName(name, isolate->factory()->byte_length_string(), JSTypedArray::kByteLengthOffset, object_offset) || - CheckForName(name, isolate->heap()->byte_offset_string(), + CheckForName(name, isolate->factory()->byte_offset_string(), JSTypedArray::kByteOffsetOffset, object_offset); case JS_ARRAY_BUFFER_TYPE: return - CheckForName(name, isolate->heap()->byte_length_string(), + CheckForName(name, isolate->factory()->byte_length_string(), JSArrayBuffer::kByteLengthOffset, object_offset); case JS_DATA_VIEW_TYPE: return - CheckForName(name, isolate->heap()->byte_length_string(), + CheckForName(name, isolate->factory()->byte_length_string(), JSDataView::kByteLengthOffset, object_offset) || - CheckForName(name, isolate->heap()->byte_offset_string(), + CheckForName(name, isolate->factory()->byte_offset_string(), JSDataView::kByteOffsetOffset, object_offset); default: return false; @@ -153,15 +124,6 @@ bool Accessors::IsJSObjectFieldAccessor<HeapType>(Handle<HeapType> type, // -MaybeObject* Accessors::ArrayGetLength(Isolate* isolate, - Object* object, - void*) { - // Traverse the prototype chain until we reach an array. - JSArray* holder = FindInstanceOf<JSArray>(isolate, object); - return holder == NULL ? Smi::FromInt(0) : holder->length(); -} - - // The helper function will 'flatten' Number objects. Handle<Object> Accessors::FlattenNumber(Isolate* isolate, Handle<Object> value) { @@ -178,174 +140,306 @@ Handle<Object> Accessors::FlattenNumber(Isolate* isolate, } -MaybeObject* Accessors::ArraySetLength(Isolate* isolate, - JSObject* object_raw, - Object* value_raw, - void*) { +void Accessors::ArrayLengthGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + DisallowHeapAllocation no_allocation; HandleScope scope(isolate); - Handle<JSObject> object(object_raw, isolate); - Handle<Object> value(value_raw, isolate); + Object* object = *Utils::OpenHandle(*info.This()); + // Traverse the prototype chain until we reach an array. 
+ JSArray* holder = FindInstanceOf<JSArray>(isolate, object); + Object* result; + if (holder != NULL) { + result = holder->length(); + } else { + result = Smi::FromInt(0); + } + info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(result, isolate))); +} + +void Accessors::ArrayLengthSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> val, + const v8::PropertyCallbackInfo<void>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + HandleScope scope(isolate); + Handle<JSObject> object = Handle<JSObject>::cast( + Utils::OpenHandle(*info.This())); + Handle<Object> value = Utils::OpenHandle(*val); // This means one of the object's prototypes is a JSArray and the // object does not have a 'length' property. Calling SetProperty // causes an infinite loop. if (!object->IsJSArray()) { - Handle<Object> result = JSObject::SetLocalPropertyIgnoreAttributes(object, - isolate->factory()->length_string(), value, NONE); - RETURN_IF_EMPTY_HANDLE(isolate, result); - return *result; + MaybeHandle<Object> maybe_result = + JSObject::SetLocalPropertyIgnoreAttributes( + object, isolate->factory()->length_string(), value, NONE); + maybe_result.Check(); + return; } value = FlattenNumber(isolate, value); Handle<JSArray> array_handle = Handle<JSArray>::cast(object); - - bool has_exception; - Handle<Object> uint32_v = - Execution::ToUint32(isolate, value, &has_exception); - if (has_exception) return Failure::Exception(); - Handle<Object> number_v = - Execution::ToNumber(isolate, value, &has_exception); - if (has_exception) return Failure::Exception(); + MaybeHandle<Object> maybe; + Handle<Object> uint32_v; + maybe = Execution::ToUint32(isolate, value); + if (!maybe.ToHandle(&uint32_v)) { + isolate->OptionalRescheduleException(false); + return; + } + Handle<Object> number_v; + maybe = Execution::ToNumber(isolate, value); + if (!maybe.ToHandle(&number_v)) { + isolate->OptionalRescheduleException(false); + return; + } if (uint32_v->Number() == number_v->Number()) { - Handle<Object> result = JSArray::SetElementsLength(array_handle, uint32_v); - RETURN_IF_EMPTY_HANDLE(isolate, result); - return *result; + maybe = JSArray::SetElementsLength(array_handle, uint32_v); + maybe.Check(); + return; } - return isolate->Throw( + + isolate->ScheduleThrow( *isolate->factory()->NewRangeError("invalid_array_length", HandleVector<Object>(NULL, 0))); } -const AccessorDescriptor Accessors::ArrayLength = { - ArrayGetLength, - ArraySetLength, - 0 -}; +Handle<AccessorInfo> Accessors::ArrayLengthInfo( + Isolate* isolate, PropertyAttributes attributes) { + return MakeAccessor(isolate, + isolate->factory()->length_string(), + &ArrayLengthGetter, + &ArrayLengthSetter, + attributes); +} + // // Accessors::StringLength // +void Accessors::StringLengthGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + DisallowHeapAllocation no_allocation; + HandleScope scope(isolate); + Object* value = *Utils::OpenHandle(*info.This()); + Object* result; + if (value->IsJSValue()) value = JSValue::cast(value)->value(); + if (value->IsString()) { + result = Smi::FromInt(String::cast(value)->length()); + } else { + // If object is not a string we return 0 to be compatible with WebKit. + // Note: Firefox returns the length of ToString(object). 
+ result = Smi::FromInt(0); + } + info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(result, isolate))); +} + -MaybeObject* Accessors::StringGetLength(Isolate* isolate, - Object* object, - void*) { - Object* value = object; - if (object->IsJSValue()) value = JSValue::cast(object)->value(); - if (value->IsString()) return Smi::FromInt(String::cast(value)->length()); - // If object is not a string we return 0 to be compatible with WebKit. - // Note: Firefox returns the length of ToString(object). - return Smi::FromInt(0); +void Accessors::StringLengthSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); } -const AccessorDescriptor Accessors::StringLength = { - StringGetLength, - IllegalSetter, - 0 -}; +Handle<AccessorInfo> Accessors::StringLengthInfo( + Isolate* isolate, PropertyAttributes attributes) { + return MakeAccessor(isolate, + isolate->factory()->length_string(), + &StringLengthGetter, + &StringLengthSetter, + attributes); +} // -// Accessors::ScriptSource +// Accessors::ScriptColumnOffset // -MaybeObject* Accessors::ScriptGetSource(Isolate* isolate, - Object* object, - void*) { - Object* script = JSValue::cast(object)->value(); - return Script::cast(script)->source(); +void Accessors::ScriptColumnOffsetGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + DisallowHeapAllocation no_allocation; + HandleScope scope(isolate); + Object* object = *Utils::OpenHandle(*info.This()); + Object* res = Script::cast(JSValue::cast(object)->value())->column_offset(); + info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(res, isolate))); } -const AccessorDescriptor Accessors::ScriptSource = { - ScriptGetSource, - IllegalSetter, - 0 -}; +void Accessors::ScriptColumnOffsetSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); +} + + +Handle<AccessorInfo> Accessors::ScriptColumnOffsetInfo( + Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name(isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("column_offset"))); + return MakeAccessor(isolate, + name, + &ScriptColumnOffsetGetter, + &ScriptColumnOffsetSetter, + attributes); +} // -// Accessors::ScriptName +// Accessors::ScriptId // -MaybeObject* Accessors::ScriptGetName(Isolate* isolate, - Object* object, - void*) { - Object* script = JSValue::cast(object)->value(); - return Script::cast(script)->name(); +void Accessors::ScriptIdGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + DisallowHeapAllocation no_allocation; + HandleScope scope(isolate); + Object* object = *Utils::OpenHandle(*info.This()); + Object* id = Script::cast(JSValue::cast(object)->value())->id(); + info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(id, isolate))); } -const AccessorDescriptor Accessors::ScriptName = { - ScriptGetName, - IllegalSetter, - 0 -}; +void Accessors::ScriptIdSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); +} + + +Handle<AccessorInfo> Accessors::ScriptIdInfo( + Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name(isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("id"))); + return MakeAccessor(isolate, + name, + 
&ScriptIdGetter, + &ScriptIdSetter, + attributes); +} // -// Accessors::ScriptId +// Accessors::ScriptName // -MaybeObject* Accessors::ScriptGetId(Isolate* isolate, Object* object, void*) { - Object* script = JSValue::cast(object)->value(); - return Script::cast(script)->id(); +void Accessors::ScriptNameGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + DisallowHeapAllocation no_allocation; + HandleScope scope(isolate); + Object* object = *Utils::OpenHandle(*info.This()); + Object* source = Script::cast(JSValue::cast(object)->value())->name(); + info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(source, isolate))); } -const AccessorDescriptor Accessors::ScriptId = { - ScriptGetId, - IllegalSetter, - 0 -}; +void Accessors::ScriptNameSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); +} + + +Handle<AccessorInfo> Accessors::ScriptNameInfo( + Isolate* isolate, PropertyAttributes attributes) { + return MakeAccessor(isolate, + isolate->factory()->name_string(), + &ScriptNameGetter, + &ScriptNameSetter, + attributes); +} // -// Accessors::ScriptLineOffset +// Accessors::ScriptSource // -MaybeObject* Accessors::ScriptGetLineOffset(Isolate* isolate, - Object* object, - void*) { - Object* script = JSValue::cast(object)->value(); - return Script::cast(script)->line_offset(); +void Accessors::ScriptSourceGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + DisallowHeapAllocation no_allocation; + HandleScope scope(isolate); + Object* object = *Utils::OpenHandle(*info.This()); + Object* source = Script::cast(JSValue::cast(object)->value())->source(); + info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(source, isolate))); } -const AccessorDescriptor Accessors::ScriptLineOffset = { - ScriptGetLineOffset, - IllegalSetter, - 0 -}; +void Accessors::ScriptSourceSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); +} + + +Handle<AccessorInfo> Accessors::ScriptSourceInfo( + Isolate* isolate, PropertyAttributes attributes) { + return MakeAccessor(isolate, + isolate->factory()->source_string(), + &ScriptSourceGetter, + &ScriptSourceSetter, + attributes); +} // -// Accessors::ScriptColumnOffset +// Accessors::ScriptLineOffset // -MaybeObject* Accessors::ScriptGetColumnOffset(Isolate* isolate, - Object* object, - void*) { - Object* script = JSValue::cast(object)->value(); - return Script::cast(script)->column_offset(); +void Accessors::ScriptLineOffsetGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + DisallowHeapAllocation no_allocation; + HandleScope scope(isolate); + Object* object = *Utils::OpenHandle(*info.This()); + Object* res = Script::cast(JSValue::cast(object)->value())->line_offset(); + info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(res, isolate))); +} + + +void Accessors::ScriptLineOffsetSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); } -const AccessorDescriptor Accessors::ScriptColumnOffset = { - ScriptGetColumnOffset, - IllegalSetter, - 0 -}; +Handle<AccessorInfo> Accessors::ScriptLineOffsetInfo( + 
Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name(isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("line_offset"))); + return MakeAccessor(isolate, + name, + &ScriptLineOffsetGetter, + &ScriptLineOffsetSetter, + attributes); +} // @@ -353,19 +447,36 @@ const AccessorDescriptor Accessors::ScriptColumnOffset = { // -MaybeObject* Accessors::ScriptGetType(Isolate* isolate, - Object* object, - void*) { - Object* script = JSValue::cast(object)->value(); - return Script::cast(script)->type(); +void Accessors::ScriptTypeGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + DisallowHeapAllocation no_allocation; + HandleScope scope(isolate); + Object* object = *Utils::OpenHandle(*info.This()); + Object* res = Script::cast(JSValue::cast(object)->value())->type(); + info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(res, isolate))); } -const AccessorDescriptor Accessors::ScriptType = { - ScriptGetType, - IllegalSetter, - 0 -}; +void Accessors::ScriptTypeSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); +} + + +Handle<AccessorInfo> Accessors::ScriptTypeInfo( + Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name(isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("type"))); + return MakeAccessor(isolate, + name, + &ScriptTypeGetter, + &ScriptTypeSetter, + attributes); +} // @@ -373,19 +484,37 @@ const AccessorDescriptor Accessors::ScriptType = { // -MaybeObject* Accessors::ScriptGetCompilationType(Isolate* isolate, - Object* object, - void*) { - Object* script = JSValue::cast(object)->value(); - return Smi::FromInt(Script::cast(script)->compilation_type()); +void Accessors::ScriptCompilationTypeGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + DisallowHeapAllocation no_allocation; + HandleScope scope(isolate); + Object* object = *Utils::OpenHandle(*info.This()); + Object* res = Smi::FromInt( + Script::cast(JSValue::cast(object)->value())->compilation_type()); + info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(res, isolate))); } -const AccessorDescriptor Accessors::ScriptCompilationType = { - ScriptGetCompilationType, - IllegalSetter, - 0 -}; +void Accessors::ScriptCompilationTypeSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); +} + + +Handle<AccessorInfo> Accessors::ScriptCompilationTypeInfo( + Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name(isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("compilation_type"))); + return MakeAccessor(isolate, + name, + &ScriptCompilationTypeGetter, + &ScriptCompilationTypeSetter, + attributes); +} // @@ -393,13 +522,15 @@ const AccessorDescriptor Accessors::ScriptCompilationType = { // -MaybeObject* Accessors::ScriptGetLineEnds(Isolate* isolate, - Object* object, - void*) { - JSValue* wrapper = JSValue::cast(object); +void Accessors::ScriptLineEndsGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); HandleScope scope(isolate); - Handle<Script> script(Script::cast(wrapper->value()), isolate); - InitScriptLineEnds(script); + Handle<Object> 
object = Utils::OpenHandle(*info.This()); + Handle<Script> script( + Script::cast(Handle<JSValue>::cast(object)->value()), isolate); + Script::InitLineEnds(script); ASSERT(script->line_ends()->IsFixedArray()); Handle<FixedArray> line_ends(FixedArray::cast(script->line_ends())); // We do not want anyone to modify this array from JS. @@ -407,15 +538,28 @@ MaybeObject* Accessors::ScriptGetLineEnds(Isolate* isolate, line_ends->map() == isolate->heap()->fixed_cow_array_map()); Handle<JSArray> js_array = isolate->factory()->NewJSArrayWithElements(line_ends); - return *js_array; + info.GetReturnValue().Set(Utils::ToLocal(js_array)); } -const AccessorDescriptor Accessors::ScriptLineEnds = { - ScriptGetLineEnds, - IllegalSetter, - 0 -}; +void Accessors::ScriptLineEndsSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); +} + + +Handle<AccessorInfo> Accessors::ScriptLineEndsInfo( + Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name(isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("line_ends"))); + return MakeAccessor(isolate, + name, + &ScriptLineEndsGetter, + &ScriptLineEndsSetter, + attributes); +} // @@ -423,19 +567,36 @@ const AccessorDescriptor Accessors::ScriptLineEnds = { // -MaybeObject* Accessors::ScriptGetContextData(Isolate* isolate, - Object* object, - void*) { - Object* script = JSValue::cast(object)->value(); - return Script::cast(script)->context_data(); +void Accessors::ScriptContextDataGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + DisallowHeapAllocation no_allocation; + HandleScope scope(isolate); + Object* object = *Utils::OpenHandle(*info.This()); + Object* res = Script::cast(JSValue::cast(object)->value())->context_data(); + info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(res, isolate))); +} + + +void Accessors::ScriptContextDataSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); } -const AccessorDescriptor Accessors::ScriptContextData = { - ScriptGetContextData, - IllegalSetter, - 0 -}; +Handle<AccessorInfo> Accessors::ScriptContextDataInfo( + Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name(isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("context_data"))); + return MakeAccessor(isolate, + name, + &ScriptContextDataGetter, + &ScriptContextDataSetter, + attributes); +} // @@ -443,28 +604,46 @@ const AccessorDescriptor Accessors::ScriptContextData = { // -MaybeObject* Accessors::ScriptGetEvalFromScript(Isolate* isolate, - Object* object, - void*) { - Object* script = JSValue::cast(object)->value(); - if (!Script::cast(script)->eval_from_shared()->IsUndefined()) { +void Accessors::ScriptEvalFromScriptGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + HandleScope scope(isolate); + Handle<Object> object = Utils::OpenHandle(*info.This()); + Handle<Script> script( + Script::cast(Handle<JSValue>::cast(object)->value()), isolate); + Handle<Object> result = isolate->factory()->undefined_value(); + if (!script->eval_from_shared()->IsUndefined()) { Handle<SharedFunctionInfo> eval_from_shared( - SharedFunctionInfo::cast(Script::cast(script)->eval_from_shared())); - + 
SharedFunctionInfo::cast(script->eval_from_shared())); if (eval_from_shared->script()->IsScript()) { Handle<Script> eval_from_script(Script::cast(eval_from_shared->script())); - return *GetScriptWrapper(eval_from_script); + result = Script::GetWrapper(eval_from_script); } } - return isolate->heap()->undefined_value(); + + info.GetReturnValue().Set(Utils::ToLocal(result)); } -const AccessorDescriptor Accessors::ScriptEvalFromScript = { - ScriptGetEvalFromScript, - IllegalSetter, - 0 -}; +void Accessors::ScriptEvalFromScriptSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); +} + + +Handle<AccessorInfo> Accessors::ScriptEvalFromScriptInfo( + Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name(isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("eval_from_script"))); + return MakeAccessor(isolate, + name, + &ScriptEvalFromScriptGetter, + &ScriptEvalFromScriptSetter, + attributes); +} // @@ -472,32 +651,45 @@ const AccessorDescriptor Accessors::ScriptEvalFromScript = { // -MaybeObject* Accessors::ScriptGetEvalFromScriptPosition(Isolate* isolate, - Object* object, - void*) { - Script* raw_script = Script::cast(JSValue::cast(object)->value()); +void Accessors::ScriptEvalFromScriptPositionGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); HandleScope scope(isolate); - Handle<Script> script(raw_script); - - // If this is not a script compiled through eval there is no eval position. - if (script->compilation_type() != Script::COMPILATION_TYPE_EVAL) { - return script->GetHeap()->undefined_value(); + Handle<Object> object = Utils::OpenHandle(*info.This()); + Handle<Script> script( + Script::cast(Handle<JSValue>::cast(object)->value()), isolate); + Handle<Object> result = isolate->factory()->undefined_value(); + if (script->compilation_type() == Script::COMPILATION_TYPE_EVAL) { + Handle<Code> code(SharedFunctionInfo::cast( + script->eval_from_shared())->code()); + result = Handle<Object>( + Smi::FromInt(code->SourcePosition(code->instruction_start() + + script->eval_from_instructions_offset()->value())), + isolate); } + info.GetReturnValue().Set(Utils::ToLocal(result)); +} - // Get the function from where eval was called and find the source position - // from the instruction offset. 
- Handle<Code> code(SharedFunctionInfo::cast( - script->eval_from_shared())->code()); - return Smi::FromInt(code->SourcePosition(code->instruction_start() + - script->eval_from_instructions_offset()->value())); + +void Accessors::ScriptEvalFromScriptPositionSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); } -const AccessorDescriptor Accessors::ScriptEvalFromScriptPosition = { - ScriptGetEvalFromScriptPosition, - IllegalSetter, - 0 -}; +Handle<AccessorInfo> Accessors::ScriptEvalFromScriptPositionInfo( + Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name(isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("eval_from_script_position"))); + return MakeAccessor(isolate, + name, + &ScriptEvalFromScriptPositionGetter, + &ScriptEvalFromScriptPositionSetter, + attributes); +} // @@ -505,100 +697,96 @@ const AccessorDescriptor Accessors::ScriptEvalFromScriptPosition = { // -MaybeObject* Accessors::ScriptGetEvalFromFunctionName(Isolate* isolate, - Object* object, - void*) { - Object* script = JSValue::cast(object)->value(); - Handle<SharedFunctionInfo> shared(SharedFunctionInfo::cast( - Script::cast(script)->eval_from_shared())); - - +void Accessors::ScriptEvalFromFunctionNameGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + HandleScope scope(isolate); + Handle<Object> object = Utils::OpenHandle(*info.This()); + Handle<Script> script( + Script::cast(Handle<JSValue>::cast(object)->value()), isolate); + Handle<Object> result; + Handle<SharedFunctionInfo> shared( + SharedFunctionInfo::cast(script->eval_from_shared())); // Find the name of the function calling eval. if (!shared->name()->IsUndefined()) { - return shared->name(); + result = Handle<Object>(shared->name(), isolate); } else { - return shared->inferred_name(); + result = Handle<Object>(shared->inferred_name(), isolate); } + info.GetReturnValue().Set(Utils::ToLocal(result)); } -const AccessorDescriptor Accessors::ScriptEvalFromFunctionName = { - ScriptGetEvalFromFunctionName, - IllegalSetter, - 0 -}; +void Accessors::ScriptEvalFromFunctionNameSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); +} + + +Handle<AccessorInfo> Accessors::ScriptEvalFromFunctionNameInfo( + Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name(isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("eval_from_function_name"))); + return MakeAccessor(isolate, + name, + &ScriptEvalFromFunctionNameGetter, + &ScriptEvalFromFunctionNameSetter, + attributes); +} // // Accessors::FunctionPrototype // +static Handle<Object> GetFunctionPrototype(Isolate* isolate, + Handle<Object> receiver) { + Handle<JSFunction> function; + { + DisallowHeapAllocation no_allocation; + JSFunction* function_raw = FindInstanceOf<JSFunction>(isolate, *receiver); + if (function_raw == NULL) return isolate->factory()->undefined_value(); + while (!function_raw->should_have_prototype()) { + function_raw = FindInstanceOf<JSFunction>(isolate, + function_raw->GetPrototype()); + // There has to be one because we hit the getter. 
+ ASSERT(function_raw != NULL); + } + function = Handle<JSFunction>(function_raw, isolate); + } -Handle<Object> Accessors::FunctionGetPrototype(Handle<JSFunction> function) { - CALL_HEAP_FUNCTION(function->GetIsolate(), - Accessors::FunctionGetPrototype(function->GetIsolate(), - *function, - NULL), - Object); -} - - -Handle<Object> Accessors::FunctionSetPrototype(Handle<JSFunction> function, - Handle<Object> prototype) { - ASSERT(function->should_have_prototype()); - CALL_HEAP_FUNCTION(function->GetIsolate(), - Accessors::FunctionSetPrototype(function->GetIsolate(), - *function, - *prototype, - NULL), - Object); -} - - -MaybeObject* Accessors::FunctionGetPrototype(Isolate* isolate, - Object* object, - void*) { - JSFunction* function_raw = FindInstanceOf<JSFunction>(isolate, object); - if (function_raw == NULL) return isolate->heap()->undefined_value(); - while (!function_raw->should_have_prototype()) { - function_raw = FindInstanceOf<JSFunction>(isolate, - function_raw->GetPrototype()); - // There has to be one because we hit the getter. - ASSERT(function_raw != NULL); - } - - if (!function_raw->has_prototype()) { - HandleScope scope(isolate); - Handle<JSFunction> function(function_raw); + if (!function->has_prototype()) { Handle<Object> proto = isolate->factory()->NewFunctionPrototype(function); JSFunction::SetPrototype(function, proto); - function_raw = *function; } - return function_raw->prototype(); + return Handle<Object>(function->prototype(), isolate); } -MaybeObject* Accessors::FunctionSetPrototype(Isolate* isolate, - JSObject* object_raw, - Object* value_raw, - void*) { - JSFunction* function_raw = FindInstanceOf<JSFunction>(isolate, object_raw); - if (function_raw == NULL) return isolate->heap()->undefined_value(); +static Handle<Object> SetFunctionPrototype(Isolate* isolate, + Handle<JSObject> receiver, + Handle<Object> value) { + Handle<JSFunction> function; + { + DisallowHeapAllocation no_allocation; + JSFunction* function_raw = FindInstanceOf<JSFunction>(isolate, *receiver); + if (function_raw == NULL) return isolate->factory()->undefined_value(); + function = Handle<JSFunction>(function_raw, isolate); + } - HandleScope scope(isolate); - Handle<JSFunction> function(function_raw, isolate); - Handle<JSObject> object(object_raw, isolate); - Handle<Object> value(value_raw, isolate); if (!function->should_have_prototype()) { // Since we hit this accessor, object will have no prototype property. 
- Handle<Object> result = JSObject::SetLocalPropertyIgnoreAttributes(object, - isolate->factory()->prototype_string(), value, NONE); - RETURN_IF_EMPTY_HANDLE(isolate, result); - return *result; + MaybeHandle<Object> maybe_result = + JSObject::SetLocalPropertyIgnoreAttributes( + receiver, isolate->factory()->prototype_string(), value, NONE); + return maybe_result.ToHandleChecked(); } Handle<Object> old_value; - bool is_observed = *function == *object && function->map()->is_observed(); + bool is_observed = *function == *receiver && function->map()->is_observed(); if (is_observed) { if (function->has_prototype()) old_value = handle(function->prototype(), isolate); @@ -614,15 +802,56 @@ MaybeObject* Accessors::FunctionSetPrototype(Isolate* isolate, function, "update", isolate->factory()->prototype_string(), old_value); } - return *function; + return function; } -const AccessorDescriptor Accessors::FunctionPrototype = { - FunctionGetPrototype, - FunctionSetPrototype, - 0 -}; +Handle<Object> Accessors::FunctionGetPrototype(Handle<JSFunction> function) { + return GetFunctionPrototype(function->GetIsolate(), function); +} + + +Handle<Object> Accessors::FunctionSetPrototype(Handle<JSFunction> function, + Handle<Object> prototype) { + ASSERT(function->should_have_prototype()); + Isolate* isolate = function->GetIsolate(); + return SetFunctionPrototype(isolate, function, prototype); +} + + +void Accessors::FunctionPrototypeGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + HandleScope scope(isolate); + Handle<Object> object = Utils::OpenHandle(*info.This()); + Handle<Object> result = GetFunctionPrototype(isolate, object); + info.GetReturnValue().Set(Utils::ToLocal(result)); +} + + +void Accessors::FunctionPrototypeSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> val, + const v8::PropertyCallbackInfo<void>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + HandleScope scope(isolate); + Handle<JSObject> object = + Handle<JSObject>::cast(Utils::OpenHandle(*info.This())); + Handle<Object> value = Utils::OpenHandle(*val); + + SetFunctionPrototype(isolate, object, value); +} + + +Handle<AccessorInfo> Accessors::FunctionPrototypeInfo( + Isolate* isolate, PropertyAttributes attributes) { + return MakeAccessor(isolate, + isolate->factory()->prototype_string(), + &FunctionPrototypeGetter, + &FunctionPrototypeSetter, + attributes); +} // @@ -630,31 +859,57 @@ const AccessorDescriptor Accessors::FunctionPrototype = { // -MaybeObject* Accessors::FunctionGetLength(Isolate* isolate, - Object* object, - void*) { - JSFunction* function = FindInstanceOf<JSFunction>(isolate, object); - if (function == NULL) return Smi::FromInt(0); - // Check if already compiled. - if (function->shared()->is_compiled()) { - return Smi::FromInt(function->shared()->length()); - } - // If the function isn't compiled yet, the length is not computed correctly - // yet. Compile it now and return the right length. 
+void Accessors::FunctionLengthGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); HandleScope scope(isolate); - Handle<JSFunction> function_handle(function); - if (Compiler::EnsureCompiled(function_handle, KEEP_EXCEPTION)) { - return Smi::FromInt(function_handle->shared()->length()); + Handle<Object> object = Utils::OpenHandle(*info.This()); + MaybeHandle<JSFunction> maybe_function; + + { + DisallowHeapAllocation no_allocation; + JSFunction* function = FindInstanceOf<JSFunction>(isolate, *object); + if (function != NULL) maybe_function = Handle<JSFunction>(function); } - return Failure::Exception(); + + int length = 0; + Handle<JSFunction> function; + if (maybe_function.ToHandle(&function)) { + if (function->shared()->is_compiled()) { + length = function->shared()->length(); + } else { + // If the function isn't compiled yet, the length is not computed + // correctly yet. Compile it now and return the right length. + if (Compiler::EnsureCompiled(function, KEEP_EXCEPTION)) { + length = function->shared()->length(); + } + if (isolate->has_pending_exception()) { + isolate->OptionalRescheduleException(false); + } + } + } + Handle<Object> result(Smi::FromInt(length), isolate); + info.GetReturnValue().Set(Utils::ToLocal(result)); } -const AccessorDescriptor Accessors::FunctionLength = { - FunctionGetLength, - ReadOnlySetAccessor, - 0 -}; +void Accessors::FunctionLengthSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> val, + const v8::PropertyCallbackInfo<void>& info) { + // Do nothing. +} + + +Handle<AccessorInfo> Accessors::FunctionLengthInfo( + Isolate* isolate, PropertyAttributes attributes) { + return MakeAccessor(isolate, + isolate->factory()->length_string(), + &FunctionLengthGetter, + &FunctionLengthSetter, + attributes); +} // @@ -662,21 +917,47 @@ const AccessorDescriptor Accessors::FunctionLength = { // -MaybeObject* Accessors::FunctionGetName(Isolate* isolate, - Object* object, - void*) { - JSFunction* holder = FindInstanceOf<JSFunction>(isolate, object); - return holder == NULL - ? isolate->heap()->undefined_value() - : holder->shared()->name(); +void Accessors::FunctionNameGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + HandleScope scope(isolate); + Handle<Object> object = Utils::OpenHandle(*info.This()); + MaybeHandle<JSFunction> maybe_function; + + { + DisallowHeapAllocation no_allocation; + JSFunction* function = FindInstanceOf<JSFunction>(isolate, *object); + if (function != NULL) maybe_function = Handle<JSFunction>(function); + } + + Handle<JSFunction> function; + Handle<Object> result; + if (maybe_function.ToHandle(&function)) { + result = Handle<Object>(function->shared()->name(), isolate); + } else { + result = isolate->factory()->undefined_value(); + } + info.GetReturnValue().Set(Utils::ToLocal(result)); } -const AccessorDescriptor Accessors::FunctionName = { - FunctionGetName, - ReadOnlySetAccessor, - 0 -}; +void Accessors::FunctionNameSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> val, + const v8::PropertyCallbackInfo<void>& info) { + // Do nothing. 
+} + + +Handle<AccessorInfo> Accessors::FunctionNameInfo( + Isolate* isolate, PropertyAttributes attributes) { + return MakeAccessor(isolate, + isolate->factory()->name_string(), + &FunctionNameGetter, + &FunctionNameSetter, + attributes); +} // @@ -684,16 +965,7 @@ const AccessorDescriptor Accessors::FunctionName = { // -Handle<Object> Accessors::FunctionGetArguments(Handle<JSFunction> function) { - CALL_HEAP_FUNCTION(function->GetIsolate(), - Accessors::FunctionGetArguments(function->GetIsolate(), - *function, - NULL), - Object); -} - - -static MaybeObject* ConstructArgumentsObjectForInlinedFunction( +static Handle<Object> ArgumentsForInlinedFunction( JavaScriptFrame* frame, Handle<JSFunction> inlined_function, int inlined_frame_index) { @@ -717,81 +989,124 @@ static MaybeObject* ConstructArgumentsObjectForInlinedFunction( arguments->set_elements(*array); // Return the freshly allocated arguments object. - return *arguments; + return arguments; } -MaybeObject* Accessors::FunctionGetArguments(Isolate* isolate, - Object* object, - void*) { - HandleScope scope(isolate); - JSFunction* holder = FindInstanceOf<JSFunction>(isolate, object); - if (holder == NULL) return isolate->heap()->undefined_value(); - Handle<JSFunction> function(holder, isolate); +static int FindFunctionInFrame(JavaScriptFrame* frame, + Handle<JSFunction> function) { + DisallowHeapAllocation no_allocation; + List<JSFunction*> functions(2); + frame->GetFunctions(&functions); + for (int i = functions.length() - 1; i >= 0; i--) { + if (functions[i] == *function) return i; + } + return -1; +} + + +Handle<Object> GetFunctionArguments(Isolate* isolate, + Handle<JSFunction> function) { + if (function->shared()->native()) return isolate->factory()->null_value(); - if (function->shared()->native()) return isolate->heap()->null_value(); // Find the top invocation of the function by traversing frames. - List<JSFunction*> functions(2); for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) { JavaScriptFrame* frame = it.frame(); - frame->GetFunctions(&functions); - for (int i = functions.length() - 1; i >= 0; i--) { - // Skip all frames that aren't invocations of the given function. - if (functions[i] != *function) continue; - - if (i > 0) { - // The function in question was inlined. Inlined functions have the - // correct number of arguments and no allocated arguments object, so - // we can construct a fresh one by interpreting the function's - // deoptimization input data. - return ConstructArgumentsObjectForInlinedFunction(frame, function, i); - } + int function_index = FindFunctionInFrame(frame, function); + if (function_index < 0) continue; + + if (function_index > 0) { + // The function in question was inlined. Inlined functions have the + // correct number of arguments and no allocated arguments object, so + // we can construct a fresh one by interpreting the function's + // deoptimization input data. + return ArgumentsForInlinedFunction(frame, function, function_index); + } - if (!frame->is_optimized()) { - // If there is an arguments variable in the stack, we return that. - Handle<ScopeInfo> scope_info(function->shared()->scope_info()); - int index = scope_info->StackSlotIndex( - isolate->heap()->arguments_string()); - if (index >= 0) { - Handle<Object> arguments(frame->GetExpression(index), isolate); - if (!arguments->IsArgumentsMarker()) return *arguments; - } + if (!frame->is_optimized()) { + // If there is an arguments variable in the stack, we return that. 
+ Handle<ScopeInfo> scope_info(function->shared()->scope_info()); + int index = scope_info->StackSlotIndex( + isolate->heap()->arguments_string()); + if (index >= 0) { + Handle<Object> arguments(frame->GetExpression(index), isolate); + if (!arguments->IsArgumentsMarker()) return arguments; } - - // If there is no arguments variable in the stack or we have an - // optimized frame, we find the frame that holds the actual arguments - // passed to the function. - it.AdvanceToArgumentsFrame(); - frame = it.frame(); - - // Get the number of arguments and construct an arguments object - // mirror for the right frame. - const int length = frame->ComputeParametersCount(); - Handle<JSObject> arguments = isolate->factory()->NewArgumentsObject( - function, length); - Handle<FixedArray> array = isolate->factory()->NewFixedArray(length); - - // Copy the parameters to the arguments object. - ASSERT(array->length() == length); - for (int i = 0; i < length; i++) array->set(i, frame->GetParameter(i)); - arguments->set_elements(*array); - - // Return the freshly allocated arguments object. - return *arguments; } - functions.Rewind(0); + + // If there is no arguments variable in the stack or we have an + // optimized frame, we find the frame that holds the actual arguments + // passed to the function. + it.AdvanceToArgumentsFrame(); + frame = it.frame(); + + // Get the number of arguments and construct an arguments object + // mirror for the right frame. + const int length = frame->ComputeParametersCount(); + Handle<JSObject> arguments = isolate->factory()->NewArgumentsObject( + function, length); + Handle<FixedArray> array = isolate->factory()->NewFixedArray(length); + + // Copy the parameters to the arguments object. + ASSERT(array->length() == length); + for (int i = 0; i < length; i++) array->set(i, frame->GetParameter(i)); + arguments->set_elements(*array); + + // Return the freshly allocated arguments object. + return arguments; } // No frame corresponding to the given function found. Return null. - return isolate->heap()->null_value(); + return isolate->factory()->null_value(); } -const AccessorDescriptor Accessors::FunctionArguments = { - FunctionGetArguments, - ReadOnlySetAccessor, - 0 -}; +Handle<Object> Accessors::FunctionGetArguments(Handle<JSFunction> function) { + return GetFunctionArguments(function->GetIsolate(), function); +} + + +void Accessors::FunctionArgumentsGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + HandleScope scope(isolate); + Handle<Object> object = Utils::OpenHandle(*info.This()); + MaybeHandle<JSFunction> maybe_function; + + { + DisallowHeapAllocation no_allocation; + JSFunction* function = FindInstanceOf<JSFunction>(isolate, *object); + if (function != NULL) maybe_function = Handle<JSFunction>(function); + } + + Handle<JSFunction> function; + Handle<Object> result; + if (maybe_function.ToHandle(&function)) { + result = GetFunctionArguments(isolate, function); + } else { + result = isolate->factory()->undefined_value(); + } + info.GetReturnValue().Set(Utils::ToLocal(result)); +} + + +void Accessors::FunctionArgumentsSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> val, + const v8::PropertyCallbackInfo<void>& info) { + // Do nothing. 
+} + + +Handle<AccessorInfo> Accessors::FunctionArgumentsInfo( + Isolate* isolate, PropertyAttributes attributes) { + return MakeAccessor(isolate, + isolate->factory()->arguments_string(), + &FunctionArgumentsGetter, + &FunctionArgumentsSetter, + attributes); +} // @@ -845,29 +1160,23 @@ class FrameFunctionIterator { }; -MaybeObject* Accessors::FunctionGetCaller(Isolate* isolate, - Object* object, - void*) { - HandleScope scope(isolate); +MaybeHandle<JSFunction> FindCaller(Isolate* isolate, + Handle<JSFunction> function) { DisallowHeapAllocation no_allocation; - JSFunction* holder = FindInstanceOf<JSFunction>(isolate, object); - if (holder == NULL) return isolate->heap()->undefined_value(); - if (holder->shared()->native()) return isolate->heap()->null_value(); - Handle<JSFunction> function(holder, isolate); - FrameFunctionIterator it(isolate, no_allocation); - + if (function->shared()->native()) { + return MaybeHandle<JSFunction>(); + } // Find the function from the frames. if (!it.Find(*function)) { // No frame corresponding to the given function found. Return null. - return isolate->heap()->null_value(); + return MaybeHandle<JSFunction>(); } - // Find previously called non-toplevel function. JSFunction* caller; do { caller = it.next(); - if (caller == NULL) return isolate->heap()->null_value(); + if (caller == NULL) return MaybeHandle<JSFunction>(); } while (caller->shared()->is_toplevel()); // If caller is a built-in function and caller's caller is also built-in, @@ -884,24 +1193,64 @@ MaybeObject* Accessors::FunctionGetCaller(Isolate* isolate, // allows us to make bound functions use the strict function map // and its associated throwing caller and arguments. if (caller->shared()->bound()) { - return isolate->heap()->null_value(); + return MaybeHandle<JSFunction>(); } // Censor if the caller is not a sloppy mode function. // Change from ES5, which used to throw, see: // https://bugs.ecmascript.org/show_bug.cgi?id=310 if (caller->shared()->strict_mode() == STRICT) { - return isolate->heap()->null_value(); + return MaybeHandle<JSFunction>(); + } + return Handle<JSFunction>(caller); +} + + +void Accessors::FunctionCallerGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + HandleScope scope(isolate); + Handle<Object> object = Utils::OpenHandle(*info.This()); + MaybeHandle<JSFunction> maybe_function; + { + DisallowHeapAllocation no_allocation; + JSFunction* function = FindInstanceOf<JSFunction>(isolate, *object); + if (function != NULL) maybe_function = Handle<JSFunction>(function); + } + Handle<JSFunction> function; + Handle<Object> result; + if (maybe_function.ToHandle(&function)) { + MaybeHandle<JSFunction> maybe_caller; + maybe_caller = FindCaller(isolate, function); + Handle<JSFunction> caller; + if (maybe_caller.ToHandle(&caller)) { + result = caller; + } else { + result = isolate->factory()->null_value(); + } + } else { + result = isolate->factory()->undefined_value(); } + info.GetReturnValue().Set(Utils::ToLocal(result)); +} + - return caller; +void Accessors::FunctionCallerSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> val, + const v8::PropertyCallbackInfo<void>& info) { + // Do nothing. 
} -const AccessorDescriptor Accessors::FunctionCaller = { - FunctionGetCaller, - ReadOnlySetAccessor, - 0 -}; +Handle<AccessorInfo> Accessors::FunctionCallerInfo( + Isolate* isolate, PropertyAttributes attributes) { + return MakeAccessor(isolate, + isolate->factory()->caller_string(), + &FunctionCallerGetter, + &FunctionCallerSetter, + attributes); +} // diff --git a/deps/v8/src/accessors.h b/deps/v8/src/accessors.h index 83a847222..8c006e93a 100644 --- a/deps/v8/src/accessors.h +++ b/deps/v8/src/accessors.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ACCESSORS_H_ #define V8_ACCESSORS_H_ @@ -36,42 +13,52 @@ namespace internal { // The list of accessor descriptors. This is a second-order macro // taking a macro to be applied to all accessor descriptor names. -#define ACCESSOR_DESCRIPTOR_LIST(V) \ - V(FunctionPrototype) \ - V(FunctionLength) \ - V(FunctionName) \ +#define ACCESSOR_INFO_LIST(V) \ + V(ArrayLength) \ V(FunctionArguments) \ V(FunctionCaller) \ - V(ArrayLength) \ - V(StringLength) \ - V(ScriptSource) \ - V(ScriptName) \ - V(ScriptId) \ - V(ScriptLineOffset) \ + V(FunctionName) \ + V(FunctionLength) \ + V(FunctionPrototype) \ V(ScriptColumnOffset) \ - V(ScriptType) \ V(ScriptCompilationType) \ - V(ScriptLineEnds) \ V(ScriptContextData) \ V(ScriptEvalFromScript) \ V(ScriptEvalFromScriptPosition) \ - V(ScriptEvalFromFunctionName) + V(ScriptEvalFromFunctionName) \ + V(ScriptId) \ + V(ScriptLineEnds) \ + V(ScriptLineOffset) \ + V(ScriptName) \ + V(ScriptSource) \ + V(ScriptType) \ + V(StringLength) // Accessors contains all predefined proxy accessors. class Accessors : public AllStatic { public: // Accessor descriptors. 
-#define ACCESSOR_DESCRIPTOR_DECLARATION(name) \ - static const AccessorDescriptor name; - ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION) -#undef ACCESSOR_DESCRIPTOR_DECLARATION +#define ACCESSOR_INFO_DECLARATION(name) \ + static void name##Getter( \ + v8::Local<v8::String> name, \ + const v8::PropertyCallbackInfo<v8::Value>& info); \ + static void name##Setter( \ + v8::Local<v8::String> name, \ + v8::Local<v8::Value> value, \ + const v8::PropertyCallbackInfo<void>& info); \ + static Handle<AccessorInfo> name##Info( \ + Isolate* isolate, \ + PropertyAttributes attributes); + ACCESSOR_INFO_LIST(ACCESSOR_INFO_DECLARATION) +#undef ACCESSOR_INFO_DECLARATION enum DescriptorId { -#define ACCESSOR_DESCRIPTOR_DECLARATION(name) \ - k##name, - ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION) -#undef ACCESSOR_DESCRIPTOR_DECLARATION +#define ACCESSOR_INFO_DECLARATION(name) \ + k##name##Getter, \ + k##name##Setter, + ACCESSOR_INFO_LIST(ACCESSOR_INFO_DECLARATION) +#undef ACCESSOR_INFO_DECLARATION descriptorCount }; @@ -92,72 +79,16 @@ class Accessors : public AllStatic { Handle<String> name, int* object_offset); + static Handle<AccessorInfo> MakeAccessor( + Isolate* isolate, + Handle<String> name, + AccessorGetterCallback getter, + AccessorSetterCallback setter, + PropertyAttributes attributes); private: - // Accessor functions only used through the descriptor. - static MaybeObject* FunctionSetPrototype(Isolate* isolate, - JSObject* object, - Object*, - void*); - static MaybeObject* FunctionGetPrototype(Isolate* isolate, - Object* object, - void*); - static MaybeObject* FunctionGetLength(Isolate* isolate, - Object* object, - void*); - static MaybeObject* FunctionGetName(Isolate* isolate, Object* object, void*); - static MaybeObject* FunctionGetArguments(Isolate* isolate, - Object* object, - void*); - static MaybeObject* FunctionGetCaller(Isolate* isolate, - Object* object, - void*); - static MaybeObject* ArraySetLength(Isolate* isolate, - JSObject* object, - Object*, - void*); - static MaybeObject* ArrayGetLength(Isolate* isolate, Object* object, void*); - static MaybeObject* StringGetLength(Isolate* isolate, Object* object, void*); - static MaybeObject* ScriptGetName(Isolate* isolate, Object* object, void*); - static MaybeObject* ScriptGetId(Isolate* isolate, Object* object, void*); - static MaybeObject* ScriptGetSource(Isolate* isolate, Object* object, void*); - static MaybeObject* ScriptGetLineOffset(Isolate* isolate, - Object* object, - void*); - static MaybeObject* ScriptGetColumnOffset(Isolate* isolate, - Object* object, - void*); - static MaybeObject* ScriptGetType(Isolate* isolate, Object* object, void*); - static MaybeObject* ScriptGetCompilationType(Isolate* isolate, - Object* object, - void*); - static MaybeObject* ScriptGetLineEnds(Isolate* isolate, - Object* object, - void*); - static MaybeObject* ScriptGetContextData(Isolate* isolate, - Object* object, - void*); - static MaybeObject* ScriptGetEvalFromScript(Isolate* isolate, - Object* object, - void*); - static MaybeObject* ScriptGetEvalFromScriptPosition(Isolate* isolate, - Object* object, - void*); - static MaybeObject* ScriptGetEvalFromFunctionName(Isolate* isolate, - Object* object, - void*); - // Helper functions. 
static Handle<Object> FlattenNumber(Isolate* isolate, Handle<Object> value); - static MaybeObject* IllegalSetter(Isolate* isolate, - JSObject*, - Object*, - void*); - static Object* IllegalGetAccessor(Isolate* isolate, Object* object, void*); - static MaybeObject* ReadOnlySetAccessor(Isolate* isolate, - JSObject*, - Object* value, - void*); }; } } // namespace v8::internal diff --git a/deps/v8/src/allocation-site-scopes.cc b/deps/v8/src/allocation-site-scopes.cc index bbfb39b12..51392fac8 100644 --- a/deps/v8/src/allocation-site-scopes.cc +++ b/deps/v8/src/allocation-site-scopes.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "allocation-site-scopes.h" @@ -62,7 +39,7 @@ Handle<AllocationSite> AllocationSiteCreationContext::EnterNewScope() { void AllocationSiteCreationContext::ExitScope( Handle<AllocationSite> scope_site, Handle<JSObject> object) { - if (!object.is_null() && !object->IsFailure()) { + if (!object.is_null()) { bool top_level = !scope_site.is_null() && top().is_identical_to(scope_site); diff --git a/deps/v8/src/allocation-site-scopes.h b/deps/v8/src/allocation-site-scopes.h index a195b27d8..1ffe004e1 100644 --- a/deps/v8/src/allocation-site-scopes.h +++ b/deps/v8/src/allocation-site-scopes.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ALLOCATION_SITE_SCOPES_H_ #define V8_ALLOCATION_SITE_SCOPES_H_ diff --git a/deps/v8/src/allocation-tracker.cc b/deps/v8/src/allocation-tracker.cc index a9103a84a..f5d7e0c9d 100644 --- a/deps/v8/src/allocation-tracker.cc +++ b/deps/v8/src/allocation-tracker.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -211,11 +188,6 @@ void AddressToTraceMap::RemoveRange(Address start, Address end) { } -static bool AddressesMatch(void* key1, void* key2) { - return key1 == key2; -} - - void AllocationTracker::DeleteFunctionInfo(FunctionInfo** info) { delete *info; } @@ -225,7 +197,7 @@ AllocationTracker::AllocationTracker( HeapObjectsMap* ids, StringsStorage* names) : ids_(ids), names_(names), - id_to_function_info_index_(AddressesMatch), + id_to_function_info_index_(HashMap::PointersMatch), info_index_for_other_state_(0) { FunctionInfo* info = new FunctionInfo(); info->name = "(root)"; @@ -354,8 +326,8 @@ AllocationTracker::UnresolvedLocation::~UnresolvedLocation() { void AllocationTracker::UnresolvedLocation::Resolve() { if (script_.is_null()) return; HandleScope scope(script_->GetIsolate()); - info_->line = GetScriptLineNumber(script_, start_position_); - info_->column = GetScriptColumnNumber(script_, start_position_); + info_->line = Script::GetLineNumber(script_, start_position_); + info_->column = Script::GetColumnNumber(script_, start_position_); } diff --git a/deps/v8/src/allocation-tracker.h b/deps/v8/src/allocation-tracker.h index b876d7d14..f3788b91a 100644 --- a/deps/v8/src/allocation-tracker.h +++ b/deps/v8/src/allocation-tracker.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ALLOCATION_TRACKER_H_ #define V8_ALLOCATION_TRACKER_H_ diff --git a/deps/v8/src/allocation.cc b/deps/v8/src/allocation.cc index ff16dab3c..0549a199f 100644 --- a/deps/v8/src/allocation.cc +++ b/deps/v8/src/allocation.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "allocation.h" diff --git a/deps/v8/src/allocation.h b/deps/v8/src/allocation.h index 380fa05ff..13d08a816 100644 --- a/deps/v8/src/allocation.h +++ b/deps/v8/src/allocation.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_ALLOCATION_H_ #define V8_ALLOCATION_H_ diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc index 5dcf59229..8a99c278c 100644 --- a/deps/v8/src/api.cc +++ b/deps/v8/src/api.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "api.h" @@ -106,7 +83,7 @@ namespace v8 { #define EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, value) \ EXCEPTION_BAILOUT_CHECK_GENERIC( \ - isolate, value, i::V8::FireCallCompletedCallback(isolate);) + isolate, value, isolate->FireCallCompletedCallback();) #define EXCEPTION_BAILOUT_CHECK(isolate, value) \ @@ -244,24 +221,6 @@ static inline bool EnsureInitializedForIsolate(i::Isolate* isolate, } -// Some initializing API functions are called early and may be -// called on a thread different from static initializer thread. -// If Isolate API is used, Isolate::Enter() will initialize TLS so -// Isolate::Current() works. If it's a legacy case, then the thread -// may not have TLS initialized yet. However, in initializing APIs it -// may be too early to call EnsureInitialized() - some pre-init -// parameters still have to be configured. 
-static inline i::Isolate* EnterIsolateIfNeeded() { - i::Isolate* isolate = i::Isolate::UncheckedCurrent(); - if (isolate != NULL) - return isolate; - - i::Isolate::EnterDefaultIsolate(); - isolate = i::Isolate::Current(); - return isolate; -} - - StartupDataDecompressor::StartupDataDecompressor() : raw_data(i::NewArray<char*>(V8::GetCompressedStartupDataCount())) { for (int i = 0; i < V8::GetCompressedStartupDataCount(); ++i) { @@ -391,14 +350,14 @@ void V8::SetDecompressedStartupData(StartupData* decompressed_data) { void V8::SetFatalErrorHandler(FatalErrorCallback that) { - i::Isolate* isolate = EnterIsolateIfNeeded(); + i::Isolate* isolate = i::Isolate::UncheckedCurrent(); isolate->set_exception_behavior(that); } void V8::SetAllowCodeGenerationFromStringsCallback( AllowCodeGenerationFromStringsCallback callback) { - i::Isolate* isolate = EnterIsolateIfNeeded(); + i::Isolate* isolate = i::Isolate::UncheckedCurrent(); isolate->set_allow_code_gen_callback(callback); } @@ -460,15 +419,16 @@ Extension::Extension(const char* name, ResourceConstraints::ResourceConstraints() - : max_young_space_size_(0), + : max_new_space_size_(0), max_old_space_size_(0), max_executable_size_(0), stack_limit_(NULL), - max_available_threads_(0) { } + max_available_threads_(0), + code_range_size_(0) { } void ResourceConstraints::ConfigureDefaults(uint64_t physical_memory, + uint64_t virtual_memory_limit, uint32_t number_of_processors) { - const int lump_of_memory = (i::kPointerSize / 4) * i::MB; #if V8_OS_ANDROID // Android has higher physical memory requirements before raising the maximum // heap size limits since it has no swap space. @@ -481,42 +441,50 @@ void ResourceConstraints::ConfigureDefaults(uint64_t physical_memory, const uint64_t high_limit = 1ul * i::GB; #endif - // The young_space_size should be a power of 2 and old_generation_size should - // be a multiple of Page::kPageSize. 
if (physical_memory <= low_limit) { - set_max_young_space_size(2 * lump_of_memory); - set_max_old_space_size(128 * lump_of_memory); - set_max_executable_size(96 * lump_of_memory); + set_max_new_space_size(i::Heap::kMaxNewSpaceSizeLowMemoryDevice); + set_max_old_space_size(i::Heap::kMaxOldSpaceSizeLowMemoryDevice); + set_max_executable_size(i::Heap::kMaxExecutableSizeLowMemoryDevice); } else if (physical_memory <= medium_limit) { - set_max_young_space_size(8 * lump_of_memory); - set_max_old_space_size(256 * lump_of_memory); - set_max_executable_size(192 * lump_of_memory); + set_max_new_space_size(i::Heap::kMaxNewSpaceSizeMediumMemoryDevice); + set_max_old_space_size(i::Heap::kMaxOldSpaceSizeMediumMemoryDevice); + set_max_executable_size(i::Heap::kMaxExecutableSizeMediumMemoryDevice); } else if (physical_memory <= high_limit) { - set_max_young_space_size(16 * lump_of_memory); - set_max_old_space_size(512 * lump_of_memory); - set_max_executable_size(256 * lump_of_memory); + set_max_new_space_size(i::Heap::kMaxNewSpaceSizeHighMemoryDevice); + set_max_old_space_size(i::Heap::kMaxOldSpaceSizeHighMemoryDevice); + set_max_executable_size(i::Heap::kMaxExecutableSizeHighMemoryDevice); } else { - set_max_young_space_size(16 * lump_of_memory); - set_max_old_space_size(700 * lump_of_memory); - set_max_executable_size(256 * lump_of_memory); + set_max_new_space_size(i::Heap::kMaxNewSpaceSizeHugeMemoryDevice); + set_max_old_space_size(i::Heap::kMaxOldSpaceSizeHugeMemoryDevice); + set_max_executable_size(i::Heap::kMaxExecutableSizeHugeMemoryDevice); } set_max_available_threads(i::Max(i::Min(number_of_processors, 4u), 1u)); + + if (virtual_memory_limit > 0 && i::kIs64BitArch) { + // Reserve no more than 1/8 of the memory for the code range, but at most + // 512 MB. + set_code_range_size( + i::Min(512 * i::MB, static_cast<int>(virtual_memory_limit >> 3))); + } } bool SetResourceConstraints(Isolate* v8_isolate, ResourceConstraints* constraints) { i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate); - int young_space_size = constraints->max_young_space_size(); - int old_gen_size = constraints->max_old_space_size(); + int new_space_size = constraints->max_new_space_size(); + int old_space_size = constraints->max_old_space_size(); int max_executable_size = constraints->max_executable_size(); - if (young_space_size != 0 || old_gen_size != 0 || max_executable_size != 0) { + int code_range_size = constraints->code_range_size(); + if (new_space_size != 0 || old_space_size != 0 || max_executable_size != 0 || + code_range_size != 0) { // After initialization it's too late to change Heap constraints. 
ASSERT(!isolate->IsInitialized()); - bool result = isolate->heap()->ConfigureHeap(young_space_size / 2, - old_gen_size, - max_executable_size); + bool result = isolate->heap()->ConfigureHeap(new_space_size / 2, + old_space_size, + max_executable_size, + code_range_size); if (!result) return false; } if (constraints->stack_limit() != NULL) { @@ -533,7 +501,7 @@ i::Object** V8::GlobalizeReference(i::Isolate* isolate, i::Object** obj) { LOG_API(isolate, "Persistent::New"); i::Handle<i::Object> result = isolate->global_handles()->Create(*obj); #ifdef DEBUG - (*obj)->Verify(); + (*obj)->ObjectVerify(); #endif // DEBUG return result.location(); } @@ -542,7 +510,7 @@ i::Object** V8::GlobalizeReference(i::Isolate* isolate, i::Object** obj) { i::Object** V8::CopyPersistent(i::Object** obj) { i::Handle<i::Object> result = i::GlobalHandles::CopyGlobal(obj); #ifdef DEBUG - (*obj)->Verify(); + (*obj)->ObjectVerify(); #endif // DEBUG return result.location(); } @@ -703,7 +671,7 @@ static i::Handle<i::FixedArray> EmbedderDataFor(Context* context, return i::Handle<i::FixedArray>(); } int new_size = i::Max(index, data->length() << 1) + 1; - data = env->GetIsolate()->factory()->CopySizeFixedArray(data, new_size); + data = i::FixedArray::CopySize(data, new_size); env->set_embedder_data(*data); return data; } @@ -1567,45 +1535,6 @@ void ObjectTemplate::SetInternalFieldCount(int value) { } -// --- S c r i p t D a t a --- - - -ScriptData* ScriptData::PreCompile(v8::Handle<String> source) { - i::Handle<i::String> str = Utils::OpenHandle(*source); - i::Isolate* isolate = str->GetIsolate(); - if (str->IsExternalTwoByteString()) { - i::ExternalTwoByteStringUtf16CharacterStream stream( - i::Handle<i::ExternalTwoByteString>::cast(str), 0, str->length()); - return i::PreParserApi::PreParse(isolate, &stream); - } else { - i::GenericStringUtf16CharacterStream stream(str, 0, str->length()); - return i::PreParserApi::PreParse(isolate, &stream); - } -} - - -ScriptData* ScriptData::New(const char* data, int length) { - // Return an empty ScriptData if the length is obviously invalid. - if (length % sizeof(unsigned) != 0) { - return new i::ScriptDataImpl(); - } - - // Copy the data to ensure it is properly aligned. - int deserialized_data_length = length / sizeof(unsigned); - // If aligned, don't create a copy of the data. - if (reinterpret_cast<intptr_t>(data) % sizeof(unsigned) == 0) { - return new i::ScriptDataImpl(data, length); - } - // Copy the data to align it. 
- unsigned* deserialized_data = i::NewArray<unsigned>(deserialized_data_length); - i::CopyBytes(reinterpret_cast<char*>(deserialized_data), - data, static_cast<size_t>(length)); - - return new i::ScriptDataImpl( - i::Vector<unsigned>(deserialized_data, deserialized_data_length)); -} - - // --- S c r i p t s --- @@ -1660,7 +1589,7 @@ int UnboundScript::GetLineNumber(int code_pos) { LOG_API(isolate, "UnboundScript::GetLineNumber"); if (obj->IsScript()) { i::Handle<i::Script> script(i::Script::cast(*obj)); - return i::GetScriptLineNumber(script, code_pos); + return i::Script::GetLineNumber(script, code_pos); } else { return -1; } @@ -1695,21 +1624,16 @@ Local<Value> Script::Run() { ENTER_V8(isolate); i::Logger::TimerEventScope timer_scope( isolate, i::Logger::TimerEventScope::v8_execute); - i::Object* raw_result = NULL; - { - i::HandleScope scope(isolate); - i::Handle<i::JSFunction> fun = - i::Handle<i::JSFunction>(i::JSFunction::cast(*obj), isolate); - EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> receiver( - isolate->context()->global_proxy(), isolate); - i::Handle<i::Object> result = i::Execution::Call( - isolate, fun, receiver, 0, NULL, &has_pending_exception); - EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<Value>()); - raw_result = *result; - } - i::Handle<i::Object> result(raw_result, isolate); - return Utils::ToLocal(result); + i::HandleScope scope(isolate); + i::Handle<i::JSFunction> fun = i::Handle<i::JSFunction>::cast(obj); + EXCEPTION_PREAMBLE(isolate); + i::Handle<i::Object> receiver( + isolate->context()->global_proxy(), isolate); + i::Handle<i::Object> result; + has_pending_exception = !i::Execution::Call( + isolate, fun, receiver, 0, NULL).ToHandle(&result); + EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<Value>()); + return Utils::ToLocal(scope.CloseAndEscape(result)); } @@ -1724,42 +1648,46 @@ Local<UnboundScript> ScriptCompiler::CompileUnbound( Isolate* v8_isolate, Source* source, CompileOptions options) { - i::ScriptDataImpl* script_data_impl = NULL; + i::ScriptData* script_data_impl = NULL; i::CachedDataMode cached_data_mode = i::NO_CACHED_DATA; + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate); + ON_BAILOUT(isolate, "v8::ScriptCompiler::CompileUnbound()", + return Local<UnboundScript>()); if (options & kProduceDataToCache) { cached_data_mode = i::PRODUCE_CACHED_DATA; ASSERT(source->cached_data == NULL); if (source->cached_data) { // Asked to produce cached data even though there is some already -> not - // good. In release mode, try to do the right thing: Just regenerate the - // data. - delete source->cached_data; - source->cached_data = NULL; + // good. Fail the compilation. + EXCEPTION_PREAMBLE(isolate); + i::Handle<i::Object> result = isolate->factory()->NewSyntaxError( + "invalid_cached_data", isolate->factory()->NewJSArray(0)); + isolate->Throw(*result); + isolate->ReportPendingMessages(); + has_pending_exception = true; + EXCEPTION_BAILOUT_CHECK(isolate, Local<UnboundScript>()); } } else if (source->cached_data) { - // FIXME(marja): Make compiler use CachedData directly. Aligning needs to be - // taken care of. - script_data_impl = static_cast<i::ScriptDataImpl*>(ScriptData::New( + cached_data_mode = i::CONSUME_CACHED_DATA; + // ScriptData takes care of aligning, in case the data is not aligned + // correctly. 
+ script_data_impl = i::ScriptData::New( reinterpret_cast<const char*>(source->cached_data->data), - source->cached_data->length)); - // We assert that the pre-data is sane, even though we can actually - // handle it if it turns out not to be in release mode. - ASSERT(script_data_impl->SanityCheck()); - if (script_data_impl->SanityCheck()) { - cached_data_mode = i::CONSUME_CACHED_DATA; - } else { - // If the pre-data isn't sane we simply ignore it. + source->cached_data->length); + // If the cached data is not valid, fail the compilation. + if (script_data_impl == NULL || !script_data_impl->SanityCheck()) { + EXCEPTION_PREAMBLE(isolate); + i::Handle<i::Object> result = isolate->factory()->NewSyntaxError( + "invalid_cached_data", isolate->factory()->NewJSArray(0)); + isolate->Throw(*result); + isolate->ReportPendingMessages(); delete script_data_impl; - script_data_impl = NULL; - delete source->cached_data; - source->cached_data = NULL; + has_pending_exception = true; + EXCEPTION_BAILOUT_CHECK(isolate, Local<UnboundScript>()); } } i::Handle<i::String> str = Utils::OpenHandle(*(source->source_string)); - i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate); - ON_BAILOUT(isolate, "v8::ScriptCompiler::CompileUnbound()", - return Local<UnboundScript>()); LOG_API(isolate, "ScriptCompiler::CompileUnbound"); ENTER_V8(isolate); i::SharedFunctionInfo* raw_result = NULL; @@ -1830,22 +1758,15 @@ Local<Script> ScriptCompiler::Compile( Local<Script> Script::Compile(v8::Handle<String> source, - v8::ScriptOrigin* origin, - ScriptData* script_data) { + v8::ScriptOrigin* origin) { i::Handle<i::String> str = Utils::OpenHandle(*source); - ScriptCompiler::CachedData* cached_data = NULL; - if (script_data) { - cached_data = new ScriptCompiler::CachedData( - reinterpret_cast<const uint8_t*>(script_data->Data()), - script_data->Length()); - } if (origin) { - ScriptCompiler::Source script_source(source, *origin, cached_data); + ScriptCompiler::Source script_source(source, *origin); return ScriptCompiler::Compile( reinterpret_cast<v8::Isolate*>(str->GetIsolate()), &script_source); } - ScriptCompiler::Source script_source(source, cached_data); + ScriptCompiler::Source script_source(source); return ScriptCompiler::Compile( reinterpret_cast<v8::Isolate*>(str->GetIsolate()), &script_source); @@ -1941,8 +1862,10 @@ v8::Local<Value> v8::TryCatch::StackTrace() const { i::Handle<i::JSObject> obj(i::JSObject::cast(raw_obj), isolate_); i::Handle<i::String> name = isolate_->factory()->stack_string(); if (!i::JSReceiver::HasProperty(obj, name)) return v8::Local<Value>(); - i::Handle<i::Object> value = i::GetProperty(isolate_, obj, name); - if (value.is_null()) return v8::Local<Value>(); + i::Handle<i::Object> value; + if (!i::Object::GetProperty(obj, name).ToHandle(&value)) { + return v8::Local<Value>(); + } return v8::Utils::ToLocal(scope.CloseAndEscape(value)); } else { return v8::Local<Value>(); @@ -2028,33 +1951,28 @@ v8::Handle<v8::StackTrace> Message::GetStackTrace() const { } -static i::Handle<i::Object> CallV8HeapFunction(const char* name, - i::Handle<i::Object> recv, - int argc, - i::Handle<i::Object> argv[], - bool* has_pending_exception) { +MUST_USE_RESULT static i::MaybeHandle<i::Object> CallV8HeapFunction( + const char* name, + i::Handle<i::Object> recv, + int argc, + i::Handle<i::Object> argv[]) { i::Isolate* isolate = i::Isolate::Current(); - i::Handle<i::String> fmt_str = - isolate->factory()->InternalizeUtf8String(name); - i::Object* object_fun = - 
isolate->js_builtins_object()->GetPropertyNoExceptionThrown(*fmt_str); - i::Handle<i::JSFunction> fun = - i::Handle<i::JSFunction>(i::JSFunction::cast(object_fun)); - i::Handle<i::Object> value = i::Execution::Call( - isolate, fun, recv, argc, argv, has_pending_exception); - return value; + i::Handle<i::Object> object_fun = + i::Object::GetProperty( + isolate, isolate->js_builtins_object(), name).ToHandleChecked(); + i::Handle<i::JSFunction> fun = i::Handle<i::JSFunction>::cast(object_fun); + return i::Execution::Call(isolate, fun, recv, argc, argv); } -static i::Handle<i::Object> CallV8HeapFunction(const char* name, - i::Handle<i::Object> data, - bool* has_pending_exception) { +MUST_USE_RESULT static i::MaybeHandle<i::Object> CallV8HeapFunction( + const char* name, + i::Handle<i::Object> data) { i::Handle<i::Object> argv[] = { data }; return CallV8HeapFunction(name, i::Isolate::Current()->js_builtins_object(), ARRAY_SIZE(argv), - argv, - has_pending_exception); + argv); } @@ -2065,9 +1983,9 @@ int Message::GetLineNumber() const { i::HandleScope scope(isolate); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> result = CallV8HeapFunction("GetLineNumber", - Utils::OpenHandle(this), - &has_pending_exception); + i::Handle<i::Object> result; + has_pending_exception = !CallV8HeapFunction( + "GetLineNumber", Utils::OpenHandle(this)).ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, 0); return static_cast<int>(result->Number()); } @@ -2099,10 +2017,9 @@ int Message::GetStartColumn() const { i::HandleScope scope(isolate); i::Handle<i::JSObject> data_obj = Utils::OpenHandle(this); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> start_col_obj = CallV8HeapFunction( - "GetPositionInLine", - data_obj, - &has_pending_exception); + i::Handle<i::Object> start_col_obj; + has_pending_exception = !CallV8HeapFunction( + "GetPositionInLine", data_obj).ToHandle(&start_col_obj); EXCEPTION_BAILOUT_CHECK(isolate, 0); return static_cast<int>(start_col_obj->Number()); } @@ -2114,10 +2031,9 @@ int Message::GetEndColumn() const { i::HandleScope scope(isolate); i::Handle<i::JSObject> data_obj = Utils::OpenHandle(this); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> start_col_obj = CallV8HeapFunction( - "GetPositionInLine", - data_obj, - &has_pending_exception); + i::Handle<i::Object> start_col_obj; + has_pending_exception = !CallV8HeapFunction( + "GetPositionInLine", data_obj).ToHandle(&start_col_obj); EXCEPTION_BAILOUT_CHECK(isolate, 0); i::Handle<i::JSMessageObject> message = i::Handle<i::JSMessageObject>::cast(data_obj); @@ -2146,9 +2062,9 @@ Local<String> Message::GetSourceLine() const { ENTER_V8(isolate); EscapableHandleScope scope(reinterpret_cast<Isolate*>(isolate)); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> result = CallV8HeapFunction("GetSourceLine", - Utils::OpenHandle(this), - &has_pending_exception); + i::Handle<i::Object> result; + has_pending_exception = !CallV8HeapFunction( + "GetSourceLine", Utils::OpenHandle(this)).ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::String>()); if (result->IsString()) { return scope.Escape(Utils::ToLocal(i::Handle<i::String>::cast(result))); @@ -2173,7 +2089,7 @@ Local<StackFrame> StackTrace::GetFrame(uint32_t index) const { EscapableHandleScope scope(reinterpret_cast<Isolate*>(isolate)); i::Handle<i::JSArray> self = Utils::OpenHandle(this); i::Handle<i::Object> obj = - i::Object::GetElementNoExceptionThrown(isolate, self, index); + i::Object::GetElement(isolate, self, index).ToHandleChecked(); i::Handle<i::JSObject> jsobj = 
i::Handle<i::JSObject>::cast(obj); return scope.Escape(Utils::StackFrameToLocal(jsobj)); } @@ -2212,7 +2128,8 @@ int StackFrame::GetLineNumber() const { ENTER_V8(isolate); i::HandleScope scope(isolate); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - i::Handle<i::Object> line = GetProperty(self, "lineNumber"); + i::Handle<i::Object> line = i::Object::GetProperty( + isolate, self, "lineNumber").ToHandleChecked(); if (!line->IsSmi()) { return Message::kNoLineNumberInfo; } @@ -2225,7 +2142,8 @@ int StackFrame::GetColumn() const { ENTER_V8(isolate); i::HandleScope scope(isolate); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - i::Handle<i::Object> column = GetProperty(self, "column"); + i::Handle<i::Object> column = i::Object::GetProperty( + isolate, self, "column").ToHandleChecked(); if (!column->IsSmi()) { return Message::kNoColumnInfo; } @@ -2238,7 +2156,8 @@ int StackFrame::GetScriptId() const { ENTER_V8(isolate); i::HandleScope scope(isolate); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - i::Handle<i::Object> scriptId = GetProperty(self, "scriptId"); + i::Handle<i::Object> scriptId = i::Object::GetProperty( + isolate, self, "scriptId").ToHandleChecked(); if (!scriptId->IsSmi()) { return Message::kNoScriptIdInfo; } @@ -2251,7 +2170,8 @@ Local<String> StackFrame::GetScriptName() const { ENTER_V8(isolate); EscapableHandleScope scope(reinterpret_cast<Isolate*>(isolate)); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - i::Handle<i::Object> name = GetProperty(self, "scriptName"); + i::Handle<i::Object> name = i::Object::GetProperty( + isolate, self, "scriptName").ToHandleChecked(); if (!name->IsString()) { return Local<String>(); } @@ -2264,7 +2184,8 @@ Local<String> StackFrame::GetScriptNameOrSourceURL() const { ENTER_V8(isolate); EscapableHandleScope scope(reinterpret_cast<Isolate*>(isolate)); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - i::Handle<i::Object> name = GetProperty(self, "scriptNameOrSourceURL"); + i::Handle<i::Object> name = i::Object::GetProperty( + isolate, self, "scriptNameOrSourceURL").ToHandleChecked(); if (!name->IsString()) { return Local<String>(); } @@ -2277,7 +2198,8 @@ Local<String> StackFrame::GetFunctionName() const { ENTER_V8(isolate); EscapableHandleScope scope(reinterpret_cast<Isolate*>(isolate)); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - i::Handle<i::Object> name = GetProperty(self, "functionName"); + i::Handle<i::Object> name = i::Object::GetProperty( + isolate, self, "functionName").ToHandleChecked(); if (!name->IsString()) { return Local<String>(); } @@ -2290,7 +2212,8 @@ bool StackFrame::IsEval() const { ENTER_V8(isolate); i::HandleScope scope(isolate); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - i::Handle<i::Object> is_eval = GetProperty(self, "isEval"); + i::Handle<i::Object> is_eval = i::Object::GetProperty( + isolate, self, "isEval").ToHandleChecked(); return is_eval->IsTrue(); } @@ -2300,7 +2223,8 @@ bool StackFrame::IsConstructor() const { ENTER_V8(isolate); i::HandleScope scope(isolate); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - i::Handle<i::Object> is_constructor = GetProperty(self, "isConstructor"); + i::Handle<i::Object> is_constructor = i::Object::GetProperty( + isolate, self, "isConstructor").ToHandleChecked(); return is_constructor->IsTrue(); } @@ -2308,20 +2232,18 @@ bool StackFrame::IsConstructor() const { // --- J S O N --- Local<Value> JSON::Parse(Local<String> json_string) { - i::Isolate* isolate = i::Isolate::Current(); + i::Handle<i::String> string 
= Utils::OpenHandle(*json_string); + i::Isolate* isolate = string->GetIsolate(); EnsureInitializedForIsolate(isolate, "v8::JSON::Parse"); ENTER_V8(isolate); i::HandleScope scope(isolate); - i::Handle<i::String> source = i::Handle<i::String>( - FlattenGetString(Utils::OpenHandle(*json_string))); + i::Handle<i::String> source = i::String::Flatten(string); EXCEPTION_PREAMBLE(isolate); + i::MaybeHandle<i::Object> maybe_result = + source->IsSeqOneByteString() ? i::JsonParser<true>::Parse(source) + : i::JsonParser<false>::Parse(source); i::Handle<i::Object> result; - if (source->IsSeqOneByteString()) { - result = i::JsonParser<true>::Parse(source); - } else { - result = i::JsonParser<false>::Parse(source); - } - has_pending_exception = result.is_null(); + has_pending_exception = !maybe_result.ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, Local<Object>()); return Utils::ToLocal( i::Handle<i::Object>::cast(scope.CloseAndEscape(result))); @@ -2453,60 +2375,55 @@ bool Value::IsUint32() const { bool Value::IsDate() const { - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(this); + if (!obj->IsHeapObject()) return false; + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); return obj->HasSpecificClassOf(isolate->heap()->Date_string()); } bool Value::IsStringObject() const { - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(this); + if (!obj->IsHeapObject()) return false; + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); return obj->HasSpecificClassOf(isolate->heap()->String_string()); } bool Value::IsSymbolObject() const { - // TODO(svenpanne): these and other test functions should be written such - // that they do not use Isolate::Current(). - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(this); + if (!obj->IsHeapObject()) return false; + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); return obj->HasSpecificClassOf(isolate->heap()->Symbol_string()); } bool Value::IsNumberObject() const { - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(this); + if (!obj->IsHeapObject()) return false; + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); return obj->HasSpecificClassOf(isolate->heap()->Number_string()); } -static i::Object* LookupBuiltin(i::Isolate* isolate, - const char* builtin_name) { - i::Handle<i::String> string = - isolate->factory()->InternalizeUtf8String(builtin_name); - i::Handle<i::JSBuiltinsObject> builtins = isolate->js_builtins_object(); - return builtins->GetPropertyNoExceptionThrown(*string); -} - - static bool CheckConstructor(i::Isolate* isolate, i::Handle<i::JSObject> obj, const char* class_name) { - i::Object* constr = obj->map()->constructor(); + i::Handle<i::Object> constr(obj->map()->constructor(), isolate); if (!constr->IsJSFunction()) return false; - i::JSFunction* func = i::JSFunction::cast(constr); - return func->shared()->native() && - constr == LookupBuiltin(isolate, class_name); + i::Handle<i::JSFunction> func = i::Handle<i::JSFunction>::cast(constr); + return func->shared()->native() && constr.is_identical_to( + i::Object::GetProperty(isolate, + isolate->js_builtins_object(), + class_name).ToHandleChecked()); } bool Value::IsNativeError() const { - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(this); if (obj->IsJSObject()) { i::Handle<i::JSObject> js_obj(i::JSObject::cast(*obj)); + i::Isolate* isolate = 
js_obj->GetIsolate(); return CheckConstructor(isolate, js_obj, "$Error") || CheckConstructor(isolate, js_obj, "$EvalError") || CheckConstructor(isolate, js_obj, "$RangeError") || @@ -2521,8 +2438,9 @@ bool Value::IsNativeError() const { bool Value::IsBooleanObject() const { - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(this); + if (!obj->IsHeapObject()) return false; + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); return obj->HasSpecificClassOf(isolate->heap()->Boolean_string()); } @@ -2543,7 +2461,8 @@ Local<String> Value::ToString() const { LOG_API(isolate, "ToString"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - str = i::Execution::ToString(isolate, obj, &has_pending_exception); + has_pending_exception = !i::Execution::ToString( + isolate, obj).ToHandle(&str); EXCEPTION_BAILOUT_CHECK(isolate, Local<String>()); } return ToApiHandle<String>(str); @@ -2560,7 +2479,8 @@ Local<String> Value::ToDetailString() const { LOG_API(isolate, "ToDetailString"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - str = i::Execution::ToDetailString(isolate, obj, &has_pending_exception); + has_pending_exception = !i::Execution::ToDetailString( + isolate, obj).ToHandle(&str); EXCEPTION_BAILOUT_CHECK(isolate, Local<String>()); } return ToApiHandle<String>(str); @@ -2577,7 +2497,8 @@ Local<v8::Object> Value::ToObject() const { LOG_API(isolate, "ToObject"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - val = i::Execution::ToObject(isolate, obj, &has_pending_exception); + has_pending_exception = !i::Execution::ToObject( + isolate, obj).ToHandle(&val); EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::Object>()); } return ToApiHandle<Object>(val); @@ -2605,11 +2526,12 @@ Local<Number> Value::ToNumber() const { if (obj->IsNumber()) { num = obj; } else { - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); LOG_API(isolate, "ToNumber"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - num = i::Execution::ToNumber(isolate, obj, &has_pending_exception); + has_pending_exception = !i::Execution::ToNumber( + isolate, obj).ToHandle(&num); EXCEPTION_BAILOUT_CHECK(isolate, Local<Number>()); } return ToApiHandle<Number>(num); @@ -2622,11 +2544,12 @@ Local<Integer> Value::ToInteger() const { if (obj->IsSmi()) { num = obj; } else { - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); LOG_API(isolate, "ToInteger"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - num = i::Execution::ToInteger(isolate, obj, &has_pending_exception); + has_pending_exception = !i::Execution::ToInteger( + isolate, obj).ToHandle(&num); EXCEPTION_BAILOUT_CHECK(isolate, Local<Integer>()); } return ToApiHandle<Integer>(num); @@ -2769,45 +2692,55 @@ void v8::DataView::CheckCast(Value* that) { void v8::Date::CheckCast(v8::Value* that) { - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(that); - Utils::ApiCheck(obj->HasSpecificClassOf(isolate->heap()->Date_string()), + i::Isolate* isolate = NULL; + if (obj->IsHeapObject()) isolate = i::HeapObject::cast(*obj)->GetIsolate(); + Utils::ApiCheck(isolate != NULL && + obj->HasSpecificClassOf(isolate->heap()->Date_string()), "v8::Date::Cast()", "Could not convert to date"); } void v8::StringObject::CheckCast(v8::Value* that) { - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(that); - 
Utils::ApiCheck(obj->HasSpecificClassOf(isolate->heap()->String_string()), + i::Isolate* isolate = NULL; + if (obj->IsHeapObject()) isolate = i::HeapObject::cast(*obj)->GetIsolate(); + Utils::ApiCheck(isolate != NULL && + obj->HasSpecificClassOf(isolate->heap()->String_string()), "v8::StringObject::Cast()", "Could not convert to StringObject"); } void v8::SymbolObject::CheckCast(v8::Value* that) { - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(that); - Utils::ApiCheck(obj->HasSpecificClassOf(isolate->heap()->Symbol_string()), + i::Isolate* isolate = NULL; + if (obj->IsHeapObject()) isolate = i::HeapObject::cast(*obj)->GetIsolate(); + Utils::ApiCheck(isolate != NULL && + obj->HasSpecificClassOf(isolate->heap()->Symbol_string()), "v8::SymbolObject::Cast()", "Could not convert to SymbolObject"); } void v8::NumberObject::CheckCast(v8::Value* that) { - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(that); - Utils::ApiCheck(obj->HasSpecificClassOf(isolate->heap()->Number_string()), + i::Isolate* isolate = NULL; + if (obj->IsHeapObject()) isolate = i::HeapObject::cast(*obj)->GetIsolate(); + Utils::ApiCheck(isolate != NULL && + obj->HasSpecificClassOf(isolate->heap()->Number_string()), "v8::NumberObject::Cast()", "Could not convert to NumberObject"); } void v8::BooleanObject::CheckCast(v8::Value* that) { - i::Isolate* isolate = i::Isolate::Current(); i::Handle<i::Object> obj = Utils::OpenHandle(that); - Utils::ApiCheck(obj->HasSpecificClassOf(isolate->heap()->Boolean_string()), + i::Isolate* isolate = NULL; + if (obj->IsHeapObject()) isolate = i::HeapObject::cast(*obj)->GetIsolate(); + Utils::ApiCheck(isolate != NULL && + obj->HasSpecificClassOf(isolate->heap()->Boolean_string()), "v8::BooleanObject::Cast()", "Could not convert to BooleanObject"); } @@ -2832,11 +2765,12 @@ double Value::NumberValue() const { if (obj->IsNumber()) { num = obj; } else { - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); LOG_API(isolate, "NumberValue"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - num = i::Execution::ToNumber(isolate, obj, &has_pending_exception); + has_pending_exception = !i::Execution::ToNumber( + isolate, obj).ToHandle(&num); EXCEPTION_BAILOUT_CHECK(isolate, i::OS::nan_value()); } return num->Number(); @@ -2849,11 +2783,12 @@ int64_t Value::IntegerValue() const { if (obj->IsNumber()) { num = obj; } else { - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); LOG_API(isolate, "IntegerValue"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - num = i::Execution::ToInteger(isolate, obj, &has_pending_exception); + has_pending_exception = !i::Execution::ToInteger( + isolate, obj).ToHandle(&num); EXCEPTION_BAILOUT_CHECK(isolate, 0); } if (num->IsSmi()) { @@ -2870,11 +2805,11 @@ Local<Int32> Value::ToInt32() const { if (obj->IsSmi()) { num = obj; } else { - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); LOG_API(isolate, "ToInt32"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - num = i::Execution::ToInt32(isolate, obj, &has_pending_exception); + has_pending_exception = !i::Execution::ToInt32(isolate, obj).ToHandle(&num); EXCEPTION_BAILOUT_CHECK(isolate, Local<Int32>()); } return ToApiHandle<Int32>(num); @@ -2887,11 +2822,12 @@ Local<Uint32> Value::ToUint32() const { if (obj->IsSmi()) { num = obj; } else { - i::Isolate* isolate = 
i::Isolate::Current(); + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); LOG_API(isolate, "ToUInt32"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - num = i::Execution::ToUint32(isolate, obj, &has_pending_exception); + has_pending_exception = !i::Execution::ToUint32( + isolate, obj).ToHandle(&num); EXCEPTION_BAILOUT_CHECK(isolate, Local<Uint32>()); } return ToApiHandle<Uint32>(num); @@ -2904,12 +2840,13 @@ Local<Uint32> Value::ToArrayIndex() const { if (i::Smi::cast(*obj)->value() >= 0) return Utils::Uint32ToLocal(obj); return Local<Uint32>(); } - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); LOG_API(isolate, "ToArrayIndex"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> string_obj = - i::Execution::ToString(isolate, obj, &has_pending_exception); + i::Handle<i::Object> string_obj; + has_pending_exception = !i::Execution::ToString( + isolate, obj).ToHandle(&string_obj); EXCEPTION_BAILOUT_CHECK(isolate, Local<Uint32>()); i::Handle<i::String> str = i::Handle<i::String>::cast(string_obj); uint32_t index; @@ -2931,12 +2868,12 @@ int32_t Value::Int32Value() const { if (obj->IsSmi()) { return i::Smi::cast(*obj)->value(); } else { - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); LOG_API(isolate, "Int32Value (slow)"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> num = - i::Execution::ToInt32(isolate, obj, &has_pending_exception); + i::Handle<i::Object> num; + has_pending_exception = !i::Execution::ToInt32(isolate, obj).ToHandle(&num); EXCEPTION_BAILOUT_CHECK(isolate, 0); if (num->IsSmi()) { return i::Smi::cast(*num)->value(); @@ -2966,9 +2903,9 @@ bool Value::Equals(Handle<Value> that) const { } i::Handle<i::Object> args[] = { other }; EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> result = - CallV8HeapFunction("EQUALS", obj, ARRAY_SIZE(args), args, - &has_pending_exception); + i::Handle<i::Object> result; + has_pending_exception = !CallV8HeapFunction( + "EQUALS", obj, ARRAY_SIZE(args), args).ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, false); return *result == i::Smi::FromInt(i::EQUAL); } @@ -2997,7 +2934,8 @@ bool Value::StrictEquals(Handle<Value> that) const { return other->IsNumber() && obj->Number() == other->Number(); } else if (obj->IsString()) { return other->IsString() && - i::String::cast(*obj)->Equals(i::String::cast(*other)); + i::String::Equals(i::Handle<i::String>::cast(obj), + i::Handle<i::String>::cast(other)); } else if (obj->IsUndefined() || obj->IsUndetectableObject()) { return other->IsUndefined() || other->IsUndetectableObject(); } else { @@ -3007,13 +2945,11 @@ bool Value::StrictEquals(Handle<Value> that) const { bool Value::SameValue(Handle<Value> that) const { - i::Isolate* isolate = i::Isolate::Current(); if (!Utils::ApiCheck(this != NULL && !that.IsEmpty(), "v8::Value::SameValue()", "Reading from empty handle")) { return false; } - LOG_API(isolate, "SameValue"); i::Handle<i::Object> obj = Utils::OpenHandle(this); i::Handle<i::Object> other = Utils::OpenHandle(*that); return obj->SameValue(*other); @@ -3025,12 +2961,13 @@ uint32_t Value::Uint32Value() const { if (obj->IsSmi()) { return i::Smi::cast(*obj)->value(); } else { - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate(); LOG_API(isolate, "Uint32Value"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> num = - 
i::Execution::ToUint32(isolate, obj, &has_pending_exception); + i::Handle<i::Object> num; + has_pending_exception = !i::Execution::ToUint32( + isolate, obj).ToHandle(&num); EXCEPTION_BAILOUT_CHECK(isolate, 0); if (num->IsSmi()) { return i::Smi::cast(*num)->value(); @@ -3051,14 +2988,13 @@ bool v8::Object::Set(v8::Handle<Value> key, v8::Handle<Value> value, i::Handle<i::Object> key_obj = Utils::OpenHandle(*key); i::Handle<i::Object> value_obj = Utils::OpenHandle(*value); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> obj = i::Runtime::SetObjectProperty( + has_pending_exception = i::Runtime::SetObjectProperty( isolate, self, key_obj, value_obj, static_cast<PropertyAttributes>(attribs), - i::SLOPPY); - has_pending_exception = obj.is_null(); + i::SLOPPY).is_null(); EXCEPTION_BAILOUT_CHECK(isolate, false); return true; } @@ -3072,13 +3008,8 @@ bool v8::Object::Set(uint32_t index, v8::Handle<Value> value) { i::Handle<i::JSObject> self = Utils::OpenHandle(this); i::Handle<i::Object> value_obj = Utils::OpenHandle(*value); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> obj = i::JSObject::SetElement( - self, - index, - value_obj, - NONE, - i::SLOPPY); - has_pending_exception = obj.is_null(); + has_pending_exception = i::JSObject::SetElement( + self, index, value_obj, NONE, i::SLOPPY).is_null(); EXCEPTION_BAILOUT_CHECK(isolate, false); return true; } @@ -3095,12 +3026,11 @@ bool v8::Object::ForceSet(v8::Handle<Value> key, i::Handle<i::Object> key_obj = Utils::OpenHandle(*key); i::Handle<i::Object> value_obj = Utils::OpenHandle(*value); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> obj = i::ForceSetProperty( + has_pending_exception = i::Runtime::ForceSetObjectProperty( self, key_obj, value_obj, - static_cast<PropertyAttributes>(attribs)); - has_pending_exception = obj.is_null(); + static_cast<PropertyAttributes>(attribs)).is_null(); EXCEPTION_BAILOUT_CHECK(isolate, false); return true; } @@ -3129,8 +3059,9 @@ bool v8::Object::ForceDelete(v8::Handle<Value> key) { } EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> obj = i::ForceDeleteProperty(self, key_obj); - has_pending_exception = obj.is_null(); + i::Handle<i::Object> obj; + has_pending_exception = !i::Runtime::DeleteObjectProperty( + isolate, self, key_obj, i::JSReceiver::FORCE_DELETION).ToHandle(&obj); EXCEPTION_BAILOUT_CHECK(isolate, false); return obj->IsTrue(); } @@ -3143,8 +3074,9 @@ Local<Value> v8::Object::Get(v8::Handle<Value> key) { i::Handle<i::Object> self = Utils::OpenHandle(this); i::Handle<i::Object> key_obj = Utils::OpenHandle(*key); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> result = i::GetProperty(isolate, self, key_obj); - has_pending_exception = result.is_null(); + i::Handle<i::Object> result; + has_pending_exception = + !i::Runtime::GetObjectProperty(isolate, self, key_obj).ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, Local<Value>()); return Utils::ToLocal(result); } @@ -3156,8 +3088,9 @@ Local<Value> v8::Object::Get(uint32_t index) { ENTER_V8(isolate); i::Handle<i::JSObject> self = Utils::OpenHandle(this); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> result = i::Object::GetElement(isolate, self, index); - has_pending_exception = result.is_null(); + i::Handle<i::Object> result; + has_pending_exception = + !i::Object::GetElement(isolate, self, index).ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, Local<Value>()); return Utils::ToLocal(result); } @@ -3178,7 +3111,8 @@ PropertyAttribute v8::Object::GetPropertyAttributes(v8::Handle<Value> key) { i::Handle<i::Object> key_obj = 
Utils::OpenHandle(*key); if (!key_obj->IsName()) { EXCEPTION_PREAMBLE(isolate); - key_obj = i::Execution::ToString(isolate, key_obj, &has_pending_exception); + has_pending_exception = !i::Execution::ToString( + isolate, key_obj).ToHandle(&key_obj); EXCEPTION_BAILOUT_CHECK(isolate, static_cast<PropertyAttribute>(NONE)); } i::Handle<i::Name> key_name = i::Handle<i::Name>::cast(key_obj); @@ -3210,7 +3144,8 @@ bool v8::Object::SetPrototype(Handle<Value> value) { // to propagate outside. TryCatch try_catch; EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> result = i::JSObject::SetPrototype(self, value_obj); + i::MaybeHandle<i::Object> result = i::JSObject::SetPrototype( + self, value_obj); has_pending_exception = result.is_null(); EXCEPTION_BAILOUT_CHECK(isolate, false); return true; @@ -3242,10 +3177,11 @@ Local<Array> v8::Object::GetPropertyNames() { ENTER_V8(isolate); i::HandleScope scope(isolate); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - bool threw = false; - i::Handle<i::FixedArray> value = - i::GetKeysInFixedArrayFor(self, i::INCLUDE_PROTOS, &threw); - if (threw) return Local<v8::Array>(); + EXCEPTION_PREAMBLE(isolate); + i::Handle<i::FixedArray> value; + has_pending_exception = !i::JSReceiver::GetKeys( + self, i::JSReceiver::INCLUDE_PROTOS).ToHandle(&value); + EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::Array>()); // Because we use caching to speed up enumeration it is important // to never change the result of the basic enumeration function so // we clone the result. @@ -3263,10 +3199,11 @@ Local<Array> v8::Object::GetOwnPropertyNames() { ENTER_V8(isolate); i::HandleScope scope(isolate); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - bool threw = false; - i::Handle<i::FixedArray> value = - i::GetKeysInFixedArrayFor(self, i::LOCAL_ONLY, &threw); - if (threw) return Local<v8::Array>(); + EXCEPTION_PREAMBLE(isolate); + i::Handle<i::FixedArray> value; + has_pending_exception = !i::JSReceiver::GetKeys( + self, i::JSReceiver::LOCAL_ONLY).ToHandle(&value); + EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::Array>()); // Because we use caching to speed up enumeration it is important // to never change the result of the basic enumeration function so // we clone the result. 
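
The api.cc hunks above and below all apply the same conversion: internal helpers that used to report failure through a bool* out-parameter (or by returning a null handle) now return a MaybeHandle, and the API layer derives has_pending_exception from ToHandle() before reaching EXCEPTION_BAILOUT_CHECK. A minimal standalone sketch of that control flow, with std::optional standing in for the internal i::MaybeHandle type; the helper name ToInt32 and the bailout comments are illustrative only, since the real handle types and macros are part of V8's internals and are merely assumed here:

    // Sketch of MaybeHandle-style error propagation: std::optional models
    // i::MaybeHandle (empty means an exception is pending), and the early
    // return models EXCEPTION_BAILOUT_CHECK.
    #include <iostream>
    #include <optional>
    #include <string>

    template <typename T>
    using MaybeHandle = std::optional<T>;   // empty => exception pending

    // Illustrative helper in the spirit of i::Execution::ToInt32: failure is
    // signalled through the return value, not a bool* out-parameter.
    MaybeHandle<int> ToInt32(const std::string& input) {
      try {
        return std::stoi(input);            // success: non-empty result
      } catch (const std::exception&) {
        return std::nullopt;                // failure: caller must bail out
      }
    }

    int main() {
      int num = 0;
      MaybeHandle<int> maybe = ToInt32("not a number");
      bool has_pending_exception = !maybe.has_value();  // ToHandle(&num) analogue
      if (has_pending_exception) {                      // EXCEPTION_BAILOUT_CHECK analogue
        std::cout << "bailout: exception pending\n";
        return 0;
      }
      num = *maybe;
      std::cout << num << "\n";
      return 0;
    }

Passing "42" instead of "not a number" exercises the success path that the bailout checks fall through to; the point of the pattern in these hunks is that the result only becomes usable after ToHandle() succeeds, so a caller cannot silently ignore a pending exception.
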
@@ -3361,8 +3298,9 @@ bool v8::Object::Delete(v8::Handle<Value> key) { i::Handle<i::JSObject> self = Utils::OpenHandle(this); i::Handle<i::Object> key_obj = Utils::OpenHandle(*key); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> obj = i::DeleteProperty(self, key_obj); - has_pending_exception = obj.is_null(); + i::Handle<i::Object> obj; + has_pending_exception = !i::Runtime::DeleteObjectProperty( + isolate, self, key_obj, i::JSReceiver::NORMAL_DELETION).ToHandle(&obj); EXCEPTION_BAILOUT_CHECK(isolate, false); return obj->IsTrue(); } @@ -3380,8 +3318,9 @@ bool v8::Object::Has(v8::Handle<Value> key) { i::Handle<i::JSReceiver> self = Utils::OpenHandle(this); i::Handle<i::Object> key_obj = Utils::OpenHandle(*key); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> obj = i::HasProperty(self, key_obj); - has_pending_exception = obj.is_null(); + i::Handle<i::Object> obj; + has_pending_exception = !i::Runtime::HasObjectProperty( + isolate, self, key_obj).ToHandle(&obj); EXCEPTION_BAILOUT_CHECK(isolate, false); return obj->IsTrue(); } @@ -3399,7 +3338,13 @@ bool v8::Object::Delete(uint32_t index) { ENTER_V8(isolate); HandleScope scope(reinterpret_cast<Isolate*>(isolate)); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - return i::JSReceiver::DeleteElement(self, index)->IsTrue(); + + EXCEPTION_PREAMBLE(isolate); + i::Handle<i::Object> obj; + has_pending_exception = + !i::JSReceiver::DeleteElement(self, index).ToHandle(&obj); + EXCEPTION_BAILOUT_CHECK(isolate, false); + return obj->IsTrue(); } @@ -3428,9 +3373,12 @@ static inline bool ObjectSetAccessor(Object* obj, name, getter, setter, data, settings, attributes, signature); if (info.is_null()) return false; bool fast = Utils::OpenHandle(obj)->HasFastProperties(); - i::Handle<i::Object> result = - i::JSObject::SetAccessor(Utils::OpenHandle(obj), info); - if (result.is_null() || result->IsUndefined()) return false; + i::Handle<i::Object> result; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, result, + i::JSObject::SetAccessor(Utils::OpenHandle(obj), info), + false); + if (result->IsUndefined()) return false; if (fast) i::JSObject::TransformToFastProperties(Utils::OpenHandle(obj), 0); return true; } @@ -3544,10 +3492,9 @@ static Local<Value> GetPropertyByLookup(i::Isolate* isolate, // an exception. 
EXCEPTION_PREAMBLE(isolate); PropertyAttributes ignored; - i::Handle<i::Object> result = - i::Object::GetProperty(receiver, receiver, lookup, name, - &ignored); - has_pending_exception = result.is_null(); + i::Handle<i::Object> result; + has_pending_exception = !i::Object::GetProperty( + receiver, receiver, lookup, name, &ignored).ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, Local<Value>()); return Utils::ToLocal(result); @@ -3564,7 +3511,7 @@ Local<Value> v8::Object::GetRealNamedPropertyInPrototypeChain( i::Handle<i::JSObject> self_obj = Utils::OpenHandle(this); i::Handle<i::String> key_obj = Utils::OpenHandle(*key); i::LookupResult lookup(isolate); - self_obj->LookupRealNamedPropertyInPrototypes(*key_obj, &lookup); + self_obj->LookupRealNamedPropertyInPrototypes(key_obj, &lookup); return GetPropertyByLookup(isolate, self_obj, key_obj, &lookup); } @@ -3577,7 +3524,7 @@ Local<Value> v8::Object::GetRealNamedProperty(Handle<String> key) { i::Handle<i::JSObject> self_obj = Utils::OpenHandle(this); i::Handle<i::String> key_obj = Utils::OpenHandle(*key); i::LookupResult lookup(isolate); - self_obj->LookupRealNamedProperty(*key_obj, &lookup); + self_obj->LookupRealNamedProperty(key_obj, &lookup); return GetPropertyByLookup(isolate, self_obj, key_obj, &lookup); } @@ -3596,8 +3543,7 @@ void v8::Object::TurnOnAccessCheck() { // as optimized code does not always handle access checks. i::Deoptimizer::DeoptimizeGlobalObject(*obj); - i::Handle<i::Map> new_map = - isolate->factory()->CopyMap(i::Handle<i::Map>(obj->map())); + i::Handle<i::Map> new_map = i::Map::Copy(i::Handle<i::Map>(obj->map())); new_map->set_is_access_check_needed(true); obj->set_map(*new_map); } @@ -3614,35 +3560,20 @@ Local<v8::Object> v8::Object::Clone() { ENTER_V8(isolate); i::Handle<i::JSObject> self = Utils::OpenHandle(this); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::JSObject> result = i::JSObject::Copy(self); + i::Handle<i::JSObject> result = isolate->factory()->CopyJSObject(self); has_pending_exception = result.is_null(); EXCEPTION_BAILOUT_CHECK(isolate, Local<Object>()); return Utils::ToLocal(result); } -static i::Context* GetCreationContext(i::JSObject* object) { - i::Object* constructor = object->map()->constructor(); - i::JSFunction* function; - if (!constructor->IsJSFunction()) { - // Functions have null as a constructor, - // but any JSFunction knows its context immediately. 
- ASSERT(object->IsJSFunction()); - function = i::JSFunction::cast(object); - } else { - function = i::JSFunction::cast(constructor); - } - return function->context()->native_context(); -} - - Local<v8::Context> v8::Object::CreationContext() { i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate(); ON_BAILOUT(isolate, "v8::Object::CreationContext()", return Local<v8::Context>()); ENTER_V8(isolate); i::Handle<i::JSObject> self = Utils::OpenHandle(this); - i::Context* context = GetCreationContext(*self); + i::Context* context = self->GetCreationContext(); return Utils::ToLocal(i::Handle<i::Context>(context)); } @@ -3685,7 +3616,7 @@ v8::Local<v8::Value> v8::Object::GetHiddenValue(v8::Handle<v8::String> key) { i::Handle<i::String> key_obj = Utils::OpenHandle(*key); i::Handle<i::String> key_string = isolate->factory()->InternalizeString(key_obj); - i::Handle<i::Object> result(self->GetHiddenProperty(*key_string), isolate); + i::Handle<i::Object> result(self->GetHiddenProperty(key_string), isolate); if (result->IsTheHole()) return v8::Local<v8::Value>(); return Utils::ToLocal(result); } @@ -3735,8 +3666,7 @@ void PrepareExternalArrayElements(i::Handle<i::JSObject> object, object, GetElementsKindFromExternalArrayType(array_type)); - object->set_map(*external_array_map); - object->set_elements(*array); + i::JSObject::SetMapAndElements(object, external_array_map, array); } } // namespace @@ -3901,15 +3831,17 @@ Local<v8::Value> Object::CallAsFunction(v8::Handle<v8::Value> recv, fun = i::Handle<i::JSFunction>::cast(obj); } else { EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> delegate = i::Execution::TryGetFunctionDelegate( - isolate, obj, &has_pending_exception); + i::Handle<i::Object> delegate; + has_pending_exception = !i::Execution::TryGetFunctionDelegate( + isolate, obj).ToHandle(&delegate); EXCEPTION_BAILOUT_CHECK(isolate, Local<Value>()); fun = i::Handle<i::JSFunction>::cast(delegate); recv_obj = obj; } EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> returned = i::Execution::Call( - isolate, fun, recv_obj, argc, args, &has_pending_exception, true); + i::Handle<i::Object> returned; + has_pending_exception = !i::Execution::Call( + isolate, fun, recv_obj, argc, args, true).ToHandle(&returned); EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<Value>()); return Utils::ToLocal(scope.CloseAndEscape(returned)); } @@ -3931,21 +3863,24 @@ Local<v8::Value> Object::CallAsConstructor(int argc, if (obj->IsJSFunction()) { i::Handle<i::JSFunction> fun = i::Handle<i::JSFunction>::cast(obj); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> returned = - i::Execution::New(fun, argc, args, &has_pending_exception); + i::Handle<i::Object> returned; + has_pending_exception = !i::Execution::New( + fun, argc, args).ToHandle(&returned); EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<v8::Object>()); return Utils::ToLocal(scope.CloseAndEscape( i::Handle<i::JSObject>::cast(returned))); } EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> delegate = i::Execution::TryGetConstructorDelegate( - isolate, obj, &has_pending_exception); + i::Handle<i::Object> delegate; + has_pending_exception = !i::Execution::TryGetConstructorDelegate( + isolate, obj).ToHandle(&delegate); EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::Object>()); if (!delegate->IsUndefined()) { i::Handle<i::JSFunction> fun = i::Handle<i::JSFunction>::cast(delegate); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> returned = i::Execution::Call( - isolate, fun, obj, argc, args, &has_pending_exception); + i::Handle<i::Object> returned; + 
has_pending_exception = !i::Execution::Call( + isolate, fun, obj, argc, args).ToHandle(&returned); EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<v8::Object>()); ASSERT(!delegate->IsUndefined()); return Utils::ToLocal(scope.CloseAndEscape(returned)); @@ -3986,8 +3921,9 @@ Local<v8::Object> Function::NewInstance(int argc, STATIC_ASSERT(sizeof(v8::Handle<v8::Value>) == sizeof(i::Object**)); i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> returned = - i::Execution::New(function, argc, args, &has_pending_exception); + i::Handle<i::Object> returned; + has_pending_exception = !i::Execution::New( + function, argc, args).ToHandle(&returned); EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<v8::Object>()); return scope.Escape(Utils::ToLocal(i::Handle<i::JSObject>::cast(returned))); } @@ -4001,21 +3937,17 @@ Local<v8::Value> Function::Call(v8::Handle<v8::Value> recv, int argc, ENTER_V8(isolate); i::Logger::TimerEventScope timer_scope( isolate, i::Logger::TimerEventScope::v8_execute); - i::Object* raw_result = NULL; - { - i::HandleScope scope(isolate); - i::Handle<i::JSFunction> fun = Utils::OpenHandle(this); - i::Handle<i::Object> recv_obj = Utils::OpenHandle(*recv); - STATIC_ASSERT(sizeof(v8::Handle<v8::Value>) == sizeof(i::Object**)); - i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv); - EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> returned = i::Execution::Call( - isolate, fun, recv_obj, argc, args, &has_pending_exception, true); - EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<Object>()); - raw_result = *returned; - } - i::Handle<i::Object> result(raw_result, isolate); - return Utils::ToLocal(result); + i::HandleScope scope(isolate); + i::Handle<i::JSFunction> fun = Utils::OpenHandle(this); + i::Handle<i::Object> recv_obj = Utils::OpenHandle(*recv); + STATIC_ASSERT(sizeof(v8::Handle<v8::Value>) == sizeof(i::Object**)); + i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv); + EXCEPTION_PREAMBLE(isolate); + i::Handle<i::Object> returned; + has_pending_exception = !i::Execution::Call( + isolate, fun, recv_obj, argc, args, true).ToHandle(&returned); + EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<Object>()); + return Utils::ToLocal(scope.CloseAndEscape(returned)); } @@ -4053,7 +3985,7 @@ Handle<Value> Function::GetDisplayName() const { isolate->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("displayName")); i::LookupResult lookup(isolate); - func->LookupRealNamedProperty(*property_name, &lookup); + func->LookupRealNamedProperty(property_name, &lookup); if (lookup.IsFound()) { i::Object* value = lookup.GetLazyValue(); if (value && value->IsString()) { @@ -4069,7 +4001,7 @@ ScriptOrigin Function::GetScriptOrigin() const { i::Handle<i::JSFunction> func = Utils::OpenHandle(this); if (func->shared()->script()->IsScript()) { i::Handle<i::Script> script(i::Script::cast(func->shared()->script())); - i::Handle<i::Object> scriptName = GetScriptNameOrSourceURL(script); + i::Handle<i::Object> scriptName = i::Script::GetNameOrSourceURL(script); v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(func->GetIsolate()); v8::ScriptOrigin origin( Utils::ToLocal(scriptName), @@ -4088,7 +4020,7 @@ int Function::GetScriptLineNumber() const { i::Handle<i::JSFunction> func = Utils::OpenHandle(this); if (func->shared()->script()->IsScript()) { i::Handle<i::Script> script(i::Script::cast(func->shared()->script())); - return i::GetScriptLineNumber(script, 
func->shared()->start_position()); + return i::Script::GetLineNumber(script, func->shared()->start_position()); } return kLineOffsetNotFound; } @@ -4098,7 +4030,7 @@ int Function::GetScriptColumnNumber() const { i::Handle<i::JSFunction> func = Utils::OpenHandle(this); if (func->shared()->script()->IsScript()) { i::Handle<i::Script> script(i::Script::cast(func->shared()->script())); - return i::GetScriptColumnNumber(script, func->shared()->start_position()); + return i::Script::GetColumnNumber(script, func->shared()->start_position()); } return kLineOffsetNotFound; } @@ -4678,7 +4610,7 @@ int String::WriteUtf8(char* buffer, ENTER_V8(isolate); i::Handle<i::String> str = Utils::OpenHandle(this); if (options & HINT_MANY_WRITES_EXPECTED) { - FlattenString(str); // Flatten the string for efficiency. + str = i::String::Flatten(str); // Flatten the string for efficiency. } const int string_length = str->length(); bool write_null = !(options & NO_NULL_TERMINATION); @@ -4713,7 +4645,7 @@ int String::WriteUtf8(char* buffer, } } // Recursive slow path can potentially be unreasonable slow. Flatten. - str = FlattenGetString(str); + str = i::String::Flatten(str); Utf8WriterVisitor writer(buffer, capacity, false, replace_invalid_utf8); i::String::VisitFlat(&writer, *str); return writer.CompleteWrite(write_null, nchars_ref); @@ -4735,7 +4667,7 @@ static inline int WriteHelper(const String* string, if (options & String::HINT_MANY_WRITES_EXPECTED) { // Flatten the string for efficiency. This applies whether we are // using StringCharacterStream or Get(i) to access the characters. - FlattenString(str); + str = i::String::Flatten(str); } int end = start + length; if ((length == -1) || (length > str->length() - start) ) @@ -5288,9 +5220,9 @@ Local<v8::Object> ObjectTemplate::NewInstance() { LOG_API(isolate, "ObjectTemplate::NewInstance"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> obj = - i::Execution::InstantiateObject(Utils::OpenHandle(this), - &has_pending_exception); + i::Handle<i::Object> obj; + has_pending_exception = !i::Execution::InstantiateObject( + Utils::OpenHandle(this)).ToHandle(&obj); EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::Object>()); return Utils::ToLocal(i::Handle<i::JSObject>::cast(obj)); } @@ -5303,9 +5235,9 @@ Local<v8::Function> FunctionTemplate::GetFunction() { LOG_API(isolate, "FunctionTemplate::GetFunction"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> obj = - i::Execution::InstantiateFunction(Utils::OpenHandle(this), - &has_pending_exception); + i::Handle<i::Object> obj; + has_pending_exception = !i::Execution::InstantiateFunction( + Utils::OpenHandle(this)).ToHandle(&obj); EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::Function>()); return Utils::ToLocal(i::Handle<i::JSFunction>::cast(obj)); } @@ -5356,19 +5288,21 @@ inline int StringLength(const uint16_t* string) { } -inline i::Handle<i::String> NewString(i::Factory* factory, - String::NewStringType type, - i::Vector<const char> string) { - if (type ==String::kInternalizedString) { +MUST_USE_RESULT +inline i::MaybeHandle<i::String> NewString(i::Factory* factory, + String::NewStringType type, + i::Vector<const char> string) { + if (type == String::kInternalizedString) { return factory->InternalizeUtf8String(string); } return factory->NewStringFromUtf8(string); } -inline i::Handle<i::String> NewString(i::Factory* factory, - String::NewStringType type, - i::Vector<const uint8_t> string) { +MUST_USE_RESULT +inline i::MaybeHandle<i::String> NewString(i::Factory* factory, + 
String::NewStringType type, + i::Vector<const uint8_t> string) { if (type == String::kInternalizedString) { return factory->InternalizeOneByteString(string); } @@ -5376,9 +5310,10 @@ inline i::Handle<i::String> NewString(i::Factory* factory, } -inline i::Handle<i::String> NewString(i::Factory* factory, - String::NewStringType type, - i::Vector<const uint16_t> string) { +MUST_USE_RESULT +inline i::MaybeHandle<i::String> NewString(i::Factory* factory, + String::NewStringType type, + i::Vector<const uint16_t> string) { if (type == String::kInternalizedString) { return factory->InternalizeTwoByteString(string); } @@ -5401,10 +5336,11 @@ inline Local<String> NewString(Isolate* v8_isolate, } ENTER_V8(isolate); if (length == -1) length = StringLength(data); - i::Handle<i::String> result = NewString( - isolate->factory(), type, i::Vector<const Char>(data, length)); // We do not expect this to fail. Change this if it does. - CHECK(!result.is_null()); + i::Handle<i::String> result = NewString( + isolate->factory(), + type, + i::Vector<const Char>(data, length)).ToHandleChecked(); if (type == String::kUndetectableString) { result->MarkAsUndetectable(); } @@ -5460,10 +5396,9 @@ Local<String> v8::String::Concat(Handle<String> left, Handle<String> right) { LOG_API(isolate, "String::New(char)"); ENTER_V8(isolate); i::Handle<i::String> right_string = Utils::OpenHandle(*right); - i::Handle<i::String> result = isolate->factory()->NewConsString(left_string, - right_string); // We do not expect this to fail. Change this if it does. - CHECK(!result.is_null()); + i::Handle<i::String> result = isolate->factory()->NewConsString( + left_string, right_string).ToHandleChecked(); return Utils::ToLocal(result); } @@ -5471,22 +5406,18 @@ Local<String> v8::String::Concat(Handle<String> left, Handle<String> right) { static i::Handle<i::String> NewExternalStringHandle( i::Isolate* isolate, v8::String::ExternalStringResource* resource) { - i::Handle<i::String> result = - isolate->factory()->NewExternalStringFromTwoByte(resource); // We do not expect this to fail. Change this if it does. - CHECK(!result.is_null()); - return result; + return isolate->factory()->NewExternalStringFromTwoByte( + resource).ToHandleChecked(); } static i::Handle<i::String> NewExternalAsciiStringHandle( i::Isolate* isolate, v8::String::ExternalAsciiStringResource* resource) { - i::Handle<i::String> result = - isolate->factory()->NewExternalStringFromAscii(resource); // We do not expect this to fail. Change this if it does. - CHECK(!result.is_null()); - return result; + return isolate->factory()->NewExternalStringFromAscii( + resource).ToHandleChecked(); } @@ -5603,16 +5534,17 @@ Local<v8::Value> v8::NumberObject::New(Isolate* isolate, double value) { LOG_API(i_isolate, "NumberObject::New"); ENTER_V8(i_isolate); i::Handle<i::Object> number = i_isolate->factory()->NewNumber(value); - i::Handle<i::Object> obj = i_isolate->factory()->ToObject(number); + i::Handle<i::Object> obj = + i::Object::ToObject(i_isolate, number).ToHandleChecked(); return Utils::ToLocal(obj); } double v8::NumberObject::ValueOf() const { - i::Isolate* isolate = i::Isolate::Current(); - LOG_API(isolate, "NumberObject::NumberValue"); i::Handle<i::Object> obj = Utils::OpenHandle(this); i::Handle<i::JSValue> jsvalue = i::Handle<i::JSValue>::cast(obj); + i::Isolate* isolate = jsvalue->GetIsolate(); + LOG_API(isolate, "NumberObject::NumberValue"); return jsvalue->value()->Number(); } @@ -5626,36 +5558,38 @@ Local<v8::Value> v8::BooleanObject::New(bool value) { ? 
isolate->heap()->true_value() : isolate->heap()->false_value(), isolate); - i::Handle<i::Object> obj = isolate->factory()->ToObject(boolean); + i::Handle<i::Object> obj = + i::Object::ToObject(isolate, boolean).ToHandleChecked(); return Utils::ToLocal(obj); } bool v8::BooleanObject::ValueOf() const { - i::Isolate* isolate = i::Isolate::Current(); - LOG_API(isolate, "BooleanObject::BooleanValue"); i::Handle<i::Object> obj = Utils::OpenHandle(this); i::Handle<i::JSValue> jsvalue = i::Handle<i::JSValue>::cast(obj); + i::Isolate* isolate = jsvalue->GetIsolate(); + LOG_API(isolate, "BooleanObject::BooleanValue"); return jsvalue->value()->IsTrue(); } Local<v8::Value> v8::StringObject::New(Handle<String> value) { - i::Isolate* isolate = i::Isolate::Current(); + i::Handle<i::String> string = Utils::OpenHandle(*value); + i::Isolate* isolate = string->GetIsolate(); EnsureInitializedForIsolate(isolate, "v8::StringObject::New()"); LOG_API(isolate, "StringObject::New"); ENTER_V8(isolate); i::Handle<i::Object> obj = - isolate->factory()->ToObject(Utils::OpenHandle(*value)); + i::Object::ToObject(isolate, string).ToHandleChecked(); return Utils::ToLocal(obj); } Local<v8::String> v8::StringObject::ValueOf() const { - i::Isolate* isolate = i::Isolate::Current(); - LOG_API(isolate, "StringObject::StringValue"); i::Handle<i::Object> obj = Utils::OpenHandle(this); i::Handle<i::JSValue> jsvalue = i::Handle<i::JSValue>::cast(obj); + i::Isolate* isolate = jsvalue->GetIsolate(); + LOG_API(isolate, "StringObject::StringValue"); return Utils::ToLocal( i::Handle<i::String>(i::String::cast(jsvalue->value()))); } @@ -5666,17 +5600,17 @@ Local<v8::Value> v8::SymbolObject::New(Isolate* isolate, Handle<Symbol> value) { EnsureInitializedForIsolate(i_isolate, "v8::SymbolObject::New()"); LOG_API(i_isolate, "SymbolObject::New"); ENTER_V8(i_isolate); - i::Handle<i::Object> obj = - i_isolate->factory()->ToObject(Utils::OpenHandle(*value)); + i::Handle<i::Object> obj = i::Object::ToObject( + i_isolate, Utils::OpenHandle(*value)).ToHandleChecked(); return Utils::ToLocal(obj); } Local<v8::Symbol> v8::SymbolObject::ValueOf() const { - i::Isolate* isolate = i::Isolate::Current(); - LOG_API(isolate, "SymbolObject::SymbolValue"); i::Handle<i::Object> obj = Utils::OpenHandle(this); i::Handle<i::JSValue> jsvalue = i::Handle<i::JSValue>::cast(obj); + i::Isolate* isolate = jsvalue->GetIsolate(); + LOG_API(isolate, "SymbolObject::SymbolValue"); return Utils::ToLocal( i::Handle<i::Symbol>(i::Symbol::cast(jsvalue->value()))); } @@ -5692,24 +5626,26 @@ Local<v8::Value> v8::Date::New(Isolate* isolate, double time) { } ENTER_V8(i_isolate); EXCEPTION_PREAMBLE(i_isolate); - i::Handle<i::Object> obj = - i::Execution::NewDate(i_isolate, time, &has_pending_exception); + i::Handle<i::Object> obj; + has_pending_exception = !i::Execution::NewDate( + i_isolate, time).ToHandle(&obj); EXCEPTION_BAILOUT_CHECK(i_isolate, Local<v8::Value>()); return Utils::ToLocal(obj); } double v8::Date::ValueOf() const { - i::Isolate* isolate = i::Isolate::Current(); - LOG_API(isolate, "Date::NumberValue"); i::Handle<i::Object> obj = Utils::OpenHandle(this); i::Handle<i::JSDate> jsdate = i::Handle<i::JSDate>::cast(obj); + i::Isolate* isolate = jsdate->GetIsolate(); + LOG_API(isolate, "Date::NumberValue"); return jsdate->value()->Number(); } void v8::Date::DateTimeConfigurationChangeNotification(Isolate* isolate) { i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); + if (!i_isolate->IsInitialized()) return; ON_BAILOUT(i_isolate, 
"v8::Date::DateTimeConfigurationChangeNotification()", return); LOG_API(i_isolate, "Date::DateTimeConfigurationChangeNotification"); @@ -5752,10 +5688,10 @@ Local<v8::RegExp> v8::RegExp::New(Handle<String> pattern, LOG_API(isolate, "RegExp::New"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::JSRegExp> obj = i::Execution::NewJSRegExp( + i::Handle<i::JSRegExp> obj; + has_pending_exception = !i::Execution::NewJSRegExp( Utils::OpenHandle(*pattern), - RegExpFlagsToString(flags), - &has_pending_exception); + RegExpFlagsToString(flags)).ToHandle(&obj); EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::RegExp>()); return Utils::ToLocal(i::Handle<i::JSRegExp>::cast(obj)); } @@ -5823,7 +5759,8 @@ Local<Object> Array::CloneElementAt(uint32_t index) { i::Handle<i::JSObject> paragon_handle(i::JSObject::cast(paragon)); EXCEPTION_PREAMBLE(isolate); ENTER_V8(isolate); - i::Handle<i::JSObject> result = i::JSObject::Copy(paragon_handle); + i::Handle<i::JSObject> result = + isolate->factory()->CopyJSObject(paragon_handle); has_pending_exception = result.is_null(); EXCEPTION_BAILOUT_CHECK(isolate, Local<Object>()); return Utils::ToLocal(result); @@ -5839,14 +5776,14 @@ bool Value::IsPromise() const { ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); i::Handle<i::Object> argv[] = { obj }; - i::Handle<i::Object> b = i::Execution::Call( + i::Handle<i::Object> b; + has_pending_exception = !i::Execution::Call( isolate, handle( isolate->context()->global_object()->native_context()->is_promise()), isolate->factory()->undefined_value(), ARRAY_SIZE(argv), argv, - &has_pending_exception, - false); + false).ToHandle(&b); EXCEPTION_BAILOUT_CHECK(isolate, false); return b->BooleanValue(); } @@ -5857,14 +5794,14 @@ Local<Promise::Resolver> Promise::Resolver::New(Isolate* v8_isolate) { LOG_API(isolate, "Promise::Resolver::New"); ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); - i::Handle<i::Object> result = i::Execution::Call( + i::Handle<i::Object> result; + has_pending_exception = !i::Execution::Call( isolate, handle(isolate->context()->global_object()->native_context()-> promise_create()), isolate->factory()->undefined_value(), 0, NULL, - &has_pending_exception, - false); + false).ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, Local<Promise::Resolver>()); return Local<Promise::Resolver>::Cast(Utils::ToLocal(result)); } @@ -5883,14 +5820,13 @@ void Promise::Resolver::Resolve(Handle<Value> value) { ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); i::Handle<i::Object> argv[] = { promise, Utils::OpenHandle(*value) }; - i::Execution::Call( + has_pending_exception = i::Execution::Call( isolate, handle(isolate->context()->global_object()->native_context()-> promise_resolve()), isolate->factory()->undefined_value(), ARRAY_SIZE(argv), argv, - &has_pending_exception, - false); + false).is_null(); EXCEPTION_BAILOUT_CHECK(isolate, /* void */ ;); } @@ -5902,14 +5838,13 @@ void Promise::Resolver::Reject(Handle<Value> value) { ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); i::Handle<i::Object> argv[] = { promise, Utils::OpenHandle(*value) }; - i::Execution::Call( + has_pending_exception = i::Execution::Call( isolate, handle(isolate->context()->global_object()->native_context()-> promise_reject()), isolate->factory()->undefined_value(), ARRAY_SIZE(argv), argv, - &has_pending_exception, - false); + false).is_null(); EXCEPTION_BAILOUT_CHECK(isolate, /* void */ ;); } @@ -5921,14 +5856,14 @@ Local<Promise> Promise::Chain(Handle<Function> handler) { ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); i::Handle<i::Object> argv[] = { 
Utils::OpenHandle(*handler) }; - i::Handle<i::Object> result = i::Execution::Call( + i::Handle<i::Object> result; + has_pending_exception = !i::Execution::Call( isolate, handle(isolate->context()->global_object()->native_context()-> promise_chain()), promise, ARRAY_SIZE(argv), argv, - &has_pending_exception, - false); + false).ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, Local<Promise>()); return Local<Promise>::Cast(Utils::ToLocal(result)); } @@ -5941,14 +5876,14 @@ Local<Promise> Promise::Catch(Handle<Function> handler) { ENTER_V8(isolate); EXCEPTION_PREAMBLE(isolate); i::Handle<i::Object> argv[] = { Utils::OpenHandle(*handler) }; - i::Handle<i::Object> result = i::Execution::Call( + i::Handle<i::Object> result; + has_pending_exception = !i::Execution::Call( isolate, handle(isolate->context()->global_object()->native_context()-> promise_catch()), promise, ARRAY_SIZE(argv), argv, - &has_pending_exception, - false); + false).ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, Local<Promise>()); return Local<Promise>::Cast(Utils::ToLocal(result)); } @@ -6085,6 +6020,7 @@ i::Handle<i::JSTypedArray> NewTypedArray( ASSERT(byte_offset % sizeof(ElementType) == 0); CHECK(length <= (std::numeric_limits<size_t>::max() / sizeof(ElementType))); + CHECK(length <= static_cast<size_t>(i::Smi::kMaxValue)); size_t byte_length = length * sizeof(ElementType); SetupArrayBufferView( isolate, obj, buffer, byte_offset, byte_length); @@ -6099,7 +6035,7 @@ i::Handle<i::JSTypedArray> NewTypedArray( static_cast<uint8_t*>(buffer->backing_store()) + byte_offset); i::Handle<i::Map> map = i::JSObject::GetElementsTransitionMap(obj, elements_kind); - obj->set_map_and_elements(*map, *elements); + i::JSObject::SetMapAndElements(obj, map, elements); return obj; } @@ -6107,12 +6043,17 @@ i::Handle<i::JSTypedArray> NewTypedArray( #define TYPED_ARRAY_NEW(Type, type, TYPE, ctype, size) \ Local<Type##Array> Type##Array::New(Handle<ArrayBuffer> array_buffer, \ size_t byte_offset, size_t length) { \ - i::Isolate* isolate = i::Isolate::Current(); \ + i::Isolate* isolate = Utils::OpenHandle(*array_buffer)->GetIsolate(); \ EnsureInitializedForIsolate(isolate, \ "v8::" #Type "Array::New(Handle<ArrayBuffer>, size_t, size_t)"); \ LOG_API(isolate, \ "v8::" #Type "Array::New(Handle<ArrayBuffer>, size_t, size_t)"); \ ENTER_V8(isolate); \ + if (!Utils::ApiCheck(length <= static_cast<size_t>(i::Smi::kMaxValue), \ + "v8::" #Type "Array::New(Handle<ArrayBuffer>, size_t, size_t)", \ + "length exceeds max allowed value")) { \ + return Local<Type##Array>(); \ + } \ i::Handle<i::JSTypedArray> obj = \ NewTypedArray<ctype, v8::kExternal##Type##Array, \ i::EXTERNAL_##TYPE##_ELEMENTS>( \ @@ -6126,13 +6067,13 @@ TYPED_ARRAYS(TYPED_ARRAY_NEW) Local<DataView> DataView::New(Handle<ArrayBuffer> array_buffer, size_t byte_offset, size_t byte_length) { - i::Isolate* isolate = i::Isolate::Current(); + i::Handle<i::JSArrayBuffer> buffer = Utils::OpenHandle(*array_buffer); + i::Isolate* isolate = buffer->GetIsolate(); EnsureInitializedForIsolate( isolate, "v8::DataView::New(void*, size_t, size_t)"); LOG_API(isolate, "v8::DataView::New(void*, size_t, size_t)"); ENTER_V8(isolate); i::Handle<i::JSDataView> obj = isolate->factory()->NewJSDataView(); - i::Handle<i::JSArrayBuffer> buffer = Utils::OpenHandle(*array_buffer); SetupArrayBufferView( isolate, obj, buffer, byte_offset, byte_length); return Utils::ToLocal(obj); @@ -6156,13 +6097,16 @@ Local<Symbol> v8::Symbol::For(Isolate* isolate, Local<String> name) { i::Handle<i::JSObject> registry = 
i_isolate->GetSymbolRegistry(); i::Handle<i::String> part = i_isolate->factory()->for_string(); i::Handle<i::JSObject> symbols = - i::Handle<i::JSObject>::cast(i::JSObject::GetProperty(registry, part)); - i::Handle<i::Object> symbol = i::JSObject::GetProperty(symbols, i_name); + i::Handle<i::JSObject>::cast( + i::Object::GetPropertyOrElement(registry, part).ToHandleChecked()); + i::Handle<i::Object> symbol = + i::Object::GetPropertyOrElement(symbols, i_name).ToHandleChecked(); if (!symbol->IsSymbol()) { ASSERT(symbol->IsUndefined()); symbol = i_isolate->factory()->NewSymbol(); i::Handle<i::Symbol>::cast(symbol)->set_name(*i_name); - i::JSObject::SetProperty(symbols, i_name, symbol, NONE, i::STRICT); + i::JSObject::SetProperty( + symbols, i_name, symbol, NONE, i::STRICT).Assert(); } return Utils::ToLocal(i::Handle<i::Symbol>::cast(symbol)); } @@ -6174,13 +6118,16 @@ Local<Symbol> v8::Symbol::ForApi(Isolate* isolate, Local<String> name) { i::Handle<i::JSObject> registry = i_isolate->GetSymbolRegistry(); i::Handle<i::String> part = i_isolate->factory()->for_api_string(); i::Handle<i::JSObject> symbols = - i::Handle<i::JSObject>::cast(i::JSObject::GetProperty(registry, part)); - i::Handle<i::Object> symbol = i::JSObject::GetProperty(symbols, i_name); + i::Handle<i::JSObject>::cast( + i::Object::GetPropertyOrElement(registry, part).ToHandleChecked()); + i::Handle<i::Object> symbol = + i::Object::GetPropertyOrElement(symbols, i_name).ToHandleChecked(); if (!symbol->IsSymbol()) { ASSERT(symbol->IsUndefined()); symbol = i_isolate->factory()->NewSymbol(); i::Handle<i::Symbol>::cast(symbol)->set_name(*i_name); - i::JSObject::SetProperty(symbols, i_name, symbol, NONE, i::STRICT); + i::JSObject::SetProperty( + symbols, i_name, symbol, NONE, i::STRICT).Assert(); } return Utils::ToLocal(i::Handle<i::Symbol>::cast(symbol)); } @@ -6204,13 +6151,16 @@ Local<Private> v8::Private::ForApi(Isolate* isolate, Local<String> name) { i::Handle<i::JSObject> registry = i_isolate->GetSymbolRegistry(); i::Handle<i::String> part = i_isolate->factory()->private_api_string(); i::Handle<i::JSObject> privates = - i::Handle<i::JSObject>::cast(i::JSObject::GetProperty(registry, part)); - i::Handle<i::Object> symbol = i::JSObject::GetProperty(privates, i_name); + i::Handle<i::JSObject>::cast( + i::Object::GetPropertyOrElement(registry, part).ToHandleChecked()); + i::Handle<i::Object> symbol = + i::Object::GetPropertyOrElement(privates, i_name).ToHandleChecked(); if (!symbol->IsSymbol()) { ASSERT(symbol->IsUndefined()); symbol = i_isolate->factory()->NewPrivateSymbol(); i::Handle<i::Symbol>::cast(symbol)->set_name(*i_name); - i::JSObject::SetProperty(privates, i_name, symbol, NONE, i::STRICT); + i::JSObject::SetProperty( + privates, i_name, symbol, NONE, i::STRICT).Assert(); } Local<Symbol> result = Utils::ToLocal(i::Handle<i::Symbol>::cast(symbol)); return v8::Handle<Private>(reinterpret_cast<Private*>(*result)); @@ -6303,13 +6253,17 @@ void V8::SetCaptureStackTraceForUncaughtExceptions( void V8::SetCounterFunction(CounterLookupCallback callback) { - i::Isolate* isolate = EnterIsolateIfNeeded(); + i::Isolate* isolate = i::Isolate::UncheckedCurrent(); + // TODO(svenpanne) The Isolate should really be a parameter. 
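For readers following the api.cc conversion above: calls such as GetPropertyOrElement(...).ToHandleChecked() and SetProperty(...).Assert() are the new MaybeHandle-style calling convention that replaces the old CHECK(!result.is_null()) pattern. The standalone sketch below models that convention with a hypothetical MaybeValue type; it only illustrates the shape of the API and is not the real v8::internal::MaybeHandle.

    #include <cassert>
    #include <cstdio>

    // Hypothetical stand-in for a maybe-result: either holds a value or is
    // empty (the empty state models a pending exception).
    template <typename T>
    class MaybeValue {
     public:
      MaybeValue() : has_value_(false), value_() {}
      explicit MaybeValue(T value) : has_value_(true), value_(value) {}

      // Fallible call sites: returns false instead of crashing, so the caller
      // can set has_pending_exception and bail out.
      bool ToValue(T* out) const {
        if (!has_value_) return false;
        *out = value_;
        return true;
      }

      // Infallible call sites: crash immediately rather than letting an empty
      // result escape (this is what ToHandleChecked() expresses).
      T ToValueChecked() const {
        assert(has_value_ && "unexpected empty result");
        return value_;
      }

     private:
      bool has_value_;
      T value_;
    };

    MaybeValue<int> ParseDigit(char c) {
      if (c < '0' || c > '9') return MaybeValue<int>();  // "exception" path
      return MaybeValue<int>(c - '0');
    }

    int main() {
      // Equivalent of ...ToHandleChecked(): failure is impossible here.
      int five = ParseDigit('5').ToValueChecked();

      // Equivalent of has_pending_exception = !...ToHandle(&x).
      int bad = 0;
      bool has_pending_exception = !ParseDigit('x').ToValue(&bad);
      std::printf("five=%d failed=%d\n", five, has_pending_exception ? 1 : 0);
      return 0;
    }

The point of the split is that fallible call sites must spell out how a failure propagates, while call sites that cannot fail stop immediately instead of silently carrying an empty handle forward.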
+ if (isolate == NULL) return; isolate->stats_table()->SetCounterFunction(callback); } void V8::SetCreateHistogramFunction(CreateHistogramCallback callback) { - i::Isolate* isolate = EnterIsolateIfNeeded(); + i::Isolate* isolate = i::Isolate::UncheckedCurrent(); + // TODO(svenpanne) The Isolate should really be a parameter. + if (isolate == NULL) return; isolate->stats_table()->SetCreateHistogramFunction(callback); isolate->InitializeLoggingAndCounters(); isolate->counters()->ResetHistograms(); @@ -6317,7 +6271,9 @@ void V8::SetCreateHistogramFunction(CreateHistogramCallback callback) { void V8::SetAddHistogramSampleFunction(AddHistogramSampleCallback callback) { - i::Isolate* isolate = EnterIsolateIfNeeded(); + i::Isolate* isolate = i::Isolate::UncheckedCurrent(); + // TODO(svenpanne) The Isolate should really be a parameter. + if (isolate == NULL) return; isolate->stats_table()-> SetAddHistogramSampleFunction(callback); } @@ -6498,43 +6454,23 @@ void V8::RemoveMemoryAllocationCallback(MemoryAllocationCallback callback) { } -void V8::AddCallCompletedCallback(CallCompletedCallback callback) { - if (callback == NULL) return; - i::V8::AddCallCompletedCallback(callback); -} - - void V8::RunMicrotasks(Isolate* isolate) { - i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); - i::HandleScope scope(i_isolate); - i::V8::RunMicrotasks(i_isolate); + isolate->RunMicrotasks(); } void V8::EnqueueMicrotask(Isolate* isolate, Handle<Function> microtask) { - i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); - ENTER_V8(i_isolate); - i::Execution::EnqueueMicrotask(i_isolate, Utils::OpenHandle(*microtask)); + isolate->EnqueueMicrotask(microtask); } void V8::SetAutorunMicrotasks(Isolate* isolate, bool autorun) { - reinterpret_cast<i::Isolate*>(isolate)->set_autorun_microtasks(autorun); -} - - -void V8::RemoveCallCompletedCallback(CallCompletedCallback callback) { - i::V8::RemoveCallCompletedCallback(callback); + isolate->SetAutorunMicrotasks(autorun); } void V8::TerminateExecution(Isolate* isolate) { - // If no isolate is supplied, use the default isolate. 
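The microtask hunks just above turn the static V8::RunMicrotasks, V8::EnqueueMicrotask and V8::SetAutorunMicrotasks entry points into thin forwarders to new methods on v8::Isolate. A minimal embedder-side sketch of the per-Isolate calls, with the surrounding isolate, handle scope and context setup assumed to already exist:

    #include "v8.h"

    void DrainMicrotasks(v8::Isolate* isolate, v8::Handle<v8::Function> task) {
      // Previously: V8::SetAutorunMicrotasks(isolate, false);
      isolate->SetAutorunMicrotasks(false);

      // Previously: V8::EnqueueMicrotask(isolate, task);
      isolate->EnqueueMicrotask(task);

      // Previously: V8::RunMicrotasks(isolate);
      isolate->RunMicrotasks();
    }

The static wrappers still exist after this patch but only forward; new embedder code can call the Isolate methods directly.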
- if (isolate != NULL) { - reinterpret_cast<i::Isolate*>(isolate)->stack_guard()->TerminateExecution(); - } else { - i::Isolate::GetDefaultIsolateStackGuard()->TerminateExecution(); - } + reinterpret_cast<i::Isolate*>(isolate)->stack_guard()->TerminateExecution(); } @@ -6653,6 +6589,18 @@ Isolate::AllowJavascriptExecutionScope::~AllowJavascriptExecutionScope() { } +Isolate::SuppressMicrotaskExecutionScope::SuppressMicrotaskExecutionScope( + Isolate* isolate) + : isolate_(reinterpret_cast<i::Isolate*>(isolate)) { + isolate_->handle_scope_implementer()->IncrementCallDepth(); +} + + +Isolate::SuppressMicrotaskExecutionScope::~SuppressMicrotaskExecutionScope() { + isolate_->handle_scope_implementer()->DecrementCallDepth(); +} + + void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) { i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this); if (!isolate->IsInitialized()) { @@ -6678,6 +6626,39 @@ void Isolate::SetEventLogger(LogEventCallback that) { isolate->set_event_logger(that); } + +void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) { + if (callback == NULL) return; + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this); + isolate->AddCallCompletedCallback(callback); +} + + +void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this); + isolate->RemoveCallCompletedCallback(callback); +} + + +void Isolate::RunMicrotasks() { + i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(this); + i::HandleScope scope(i_isolate); + i_isolate->RunMicrotasks(); +} + + +void Isolate::EnqueueMicrotask(Handle<Function> microtask) { + i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(this); + ENTER_V8(i_isolate); + i::Execution::EnqueueMicrotask(i_isolate, Utils::OpenHandle(*microtask)); +} + + +void Isolate::SetAutorunMicrotasks(bool autorun) { + reinterpret_cast<i::Isolate*>(this)->set_autorun_microtasks(autorun); +} + + String::Utf8Value::Utf8Value(v8::Handle<v8::Value> obj) : str_(NULL), length_(0) { i::Isolate* isolate = i::Isolate::Current(); @@ -6807,8 +6788,6 @@ Local<Value> Exception::Error(v8::Handle<v8::String> raw_message) { // --- D e b u g S u p p o r t --- -#ifdef ENABLE_DEBUGGER_SUPPORT - bool Debug::SetDebugEventListener2(EventCallback2 that, Handle<Value> data) { i::Isolate* isolate = i::Isolate::Current(); EnsureInitializedForIsolate(isolate, "v8::Debug::SetDebugEventListener2()"); @@ -6837,34 +6816,19 @@ bool Debug::SetDebugEventListener(v8::Handle<v8::Object> that, void Debug::DebugBreak(Isolate* isolate) { - // If no isolate is supplied, use the default isolate. - if (isolate != NULL) { - reinterpret_cast<i::Isolate*>(isolate)->stack_guard()->DebugBreak(); - } else { - i::Isolate::GetDefaultIsolateStackGuard()->DebugBreak(); - } + reinterpret_cast<i::Isolate*>(isolate)->stack_guard()->DebugBreak(); } void Debug::CancelDebugBreak(Isolate* isolate) { - // If no isolate is supplied, use the default isolate. - if (isolate != NULL) { - i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); - internal_isolate->stack_guard()->Continue(i::DEBUGBREAK); - } else { - i::Isolate::GetDefaultIsolateStackGuard()->Continue(i::DEBUGBREAK); - } + i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); + internal_isolate->stack_guard()->Continue(i::DEBUGBREAK); } -void Debug::DebugBreakForCommand(ClientData* data, Isolate* isolate) { - // If no isolate is supplied, use the default isolate. 
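The Isolate additions above also move the call-completed callbacks from the deleted static V8::AddCallCompletedCallback / V8::RemoveCallCompletedCallback onto v8::Isolate, and introduce Isolate::SuppressMicrotaskExecutionScope, whose constructor and destructor bracket the handle scope implementer's call depth. A hedged embedder-side sketch, assuming the zero-argument CallCompletedCallback typedef of this V8 version and with all other setup elided:

    #include "v8.h"

    // Assumed: CallCompletedCallback takes no arguments in this V8 version.
    static void OnCallCompleted() {
      // e.g. flush an embedder-owned task queue
    }

    void WithSuppressedMicrotasks(v8::Isolate* isolate) {
      isolate->AddCallCompletedCallback(OnCallCompleted);
      {
        // The call depth stays incremented for the lifetime of this scope
        // (see the constructor/destructor above), so automatic microtask
        // execution cannot trigger inside it.
        v8::Isolate::SuppressMicrotaskExecutionScope suppress(isolate);
        // ... nested V8 calls go here ...
      }
      isolate->RemoveCallCompletedCallback(OnCallCompleted);
    }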
- if (isolate != NULL) { - i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); - internal_isolate->debugger()->EnqueueDebugCommand(data); - } else { - i::Isolate::GetDefaultIsolateDebugger()->EnqueueDebugCommand(data); - } +void Debug::DebugBreakForCommand(Isolate* isolate, ClientData* data) { + i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); + internal_isolate->debugger()->EnqueueDebugCommand(data); } @@ -6886,21 +6850,6 @@ void Debug::SendCommand(Isolate* isolate, } -void Debug::SendCommand(const uint16_t* command, int length, - ClientData* client_data, - Isolate* isolate) { - // If no isolate is supplied, use the default isolate. - if (isolate != NULL) { - i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); - internal_isolate->debugger()->ProcessCommand( - i::Vector<const uint16_t>(command, length), client_data); - } else { - i::Isolate::GetDefaultIsolateDebugger()->ProcessCommand( - i::Vector<const uint16_t>(command, length), client_data); - } -} - - void Debug::SetHostDispatchHandler(HostDispatchHandler handler, int period) { i::Isolate* isolate = i::Isolate::Current(); @@ -6928,17 +6877,17 @@ Local<Value> Debug::Call(v8::Handle<v8::Function> fun, if (!isolate->IsInitialized()) return Local<Value>(); ON_BAILOUT(isolate, "v8::Debug::Call()", return Local<Value>()); ENTER_V8(isolate); - i::Handle<i::Object> result; + i::MaybeHandle<i::Object> maybe_result; EXCEPTION_PREAMBLE(isolate); if (data.IsEmpty()) { - result = isolate->debugger()->Call(Utils::OpenHandle(*fun), - isolate->factory()->undefined_value(), - &has_pending_exception); + maybe_result = isolate->debugger()->Call( + Utils::OpenHandle(*fun), isolate->factory()->undefined_value()); } else { - result = isolate->debugger()->Call(Utils::OpenHandle(*fun), - Utils::OpenHandle(*data), - &has_pending_exception); + maybe_result = isolate->debugger()->Call( + Utils::OpenHandle(*fun), Utils::OpenHandle(*data)); } + i::Handle<i::Object> result; + has_pending_exception = !maybe_result.ToHandle(&result); EXCEPTION_BAILOUT_CHECK(isolate, Local<Value>()); return Utils::ToLocal(result); } @@ -6951,18 +6900,23 @@ Local<Value> Debug::GetMirror(v8::Handle<v8::Value> obj) { ENTER_V8(isolate); v8::EscapableHandleScope scope(reinterpret_cast<Isolate*>(isolate)); i::Debug* isolate_debug = isolate->debug(); - isolate_debug->Load(); - i::Handle<i::JSObject> debug(isolate_debug->debug_context()->global_object()); - i::Handle<i::String> name = isolate->factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("MakeMirror")); - i::Handle<i::Object> fun_obj = i::GetProperty(isolate, debug, name); - i::Handle<i::JSFunction> fun = i::Handle<i::JSFunction>::cast(fun_obj); - v8::Handle<v8::Function> v8_fun = Utils::ToLocal(fun); - const int kArgc = 1; - v8::Handle<v8::Value> argv[kArgc] = { obj }; EXCEPTION_PREAMBLE(isolate); - v8::Local<v8::Value> result = - v8_fun->Call(Utils::ToLocal(debug), kArgc, argv); + has_pending_exception = !isolate_debug->Load(); + v8::Local<v8::Value> result; + if (!has_pending_exception) { + i::Handle<i::JSObject> debug( + isolate_debug->debug_context()->global_object()); + i::Handle<i::String> name = isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("MakeMirror")); + i::Handle<i::Object> fun_obj = + i::Object::GetProperty(debug, name).ToHandleChecked(); + i::Handle<i::JSFunction> fun = i::Handle<i::JSFunction>::cast(fun_obj); + v8::Handle<v8::Function> v8_fun = Utils::ToLocal(fun); + const int kArgc = 1; + v8::Handle<v8::Value> argv[kArgc] = { obj 
}; + result = v8_fun->Call(Utils::ToLocal(debug), kArgc, argv); + has_pending_exception = result.IsEmpty(); + } EXCEPTION_BAILOUT_CHECK(isolate, Local<Value>()); return scope.Escape(result); } @@ -6992,35 +6946,25 @@ Local<Context> Debug::GetDebugContext() { } -void Debug::SetLiveEditEnabled(bool enable, Isolate* isolate) { - // If no isolate is supplied, use the default isolate. - i::Debugger* debugger; - if (isolate != NULL) { - i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); - debugger = internal_isolate->debugger(); - } else { - debugger = i::Isolate::GetDefaultIsolateDebugger(); - } - debugger->set_live_edit_enabled(enable); +void Debug::SetLiveEditEnabled(Isolate* isolate, bool enable) { + i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); + internal_isolate->debugger()->set_live_edit_enabled(enable); } -#endif // ENABLE_DEBUGGER_SUPPORT - - Handle<String> CpuProfileNode::GetFunctionName() const { i::Isolate* isolate = i::Isolate::Current(); const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this); const i::CodeEntry* entry = node->entry(); + i::Handle<i::String> name = + isolate->factory()->InternalizeUtf8String(entry->name()); if (!entry->has_name_prefix()) { - return ToApiHandle<String>( - isolate->factory()->InternalizeUtf8String(entry->name())); + return ToApiHandle<String>(name); } else { + // We do not expect this to fail. Change this if it does. i::Handle<i::String> cons = isolate->factory()->NewConsString( isolate->factory()->InternalizeUtf8String(entry->name_prefix()), - isolate->factory()->InternalizeUtf8String(entry->name())); - // We do not expect this to fail. Change this if it does. - CHECK(!cons.is_null()); + name).ToHandleChecked(); return ToApiHandle<String>(cons); } } @@ -7113,15 +7057,21 @@ const CpuProfileNode* CpuProfile::GetSample(int index) const { } +int64_t CpuProfile::GetSampleTimestamp(int index) const { + const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this); + return (profile->sample_timestamp(index) - i::TimeTicks()).InMicroseconds(); +} + + int64_t CpuProfile::GetStartTime() const { const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this); - return (profile->start_time() - i::Time::UnixEpoch()).InMicroseconds(); + return (profile->start_time() - i::TimeTicks()).InMicroseconds(); } int64_t CpuProfile::GetEndTime() const { const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this); - return (profile->end_time() - i::Time::UnixEpoch()).InMicroseconds(); + return (profile->end_time() - i::TimeTicks()).InMicroseconds(); } diff --git a/deps/v8/src/api.h b/deps/v8/src/api.h index 128087c89..f530e56f9 100644 --- a/deps/v8/src/api.h +++ b/deps/v8/src/api.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_API_H_ #define V8_API_H_ @@ -186,9 +163,9 @@ class RegisteredExtension { V(Script, JSFunction) \ V(UnboundScript, SharedFunctionInfo) \ V(Function, JSFunction) \ - V(Message, JSObject) \ + V(Message, JSMessageObject) \ V(Context, Context) \ - V(External, Foreign) \ + V(External, Object) \ V(StackTrace, JSArray) \ V(StackFrame, JSObject) \ V(DeclaredAccessorDescriptor, DeclaredAccessorDescriptor) @@ -393,8 +370,8 @@ MAKE_TO_LOCAL(ToLocal, DeclaredAccessorDescriptor, DeclaredAccessorDescriptor) const v8::From* that, bool allow_empty_handle) { \ EXTRA_CHECK(allow_empty_handle || that != NULL); \ EXTRA_CHECK(that == NULL || \ - !(*reinterpret_cast<v8::internal::To**>( \ - const_cast<v8::From*>(that)))->IsFailure()); \ + (*reinterpret_cast<v8::internal::Object**>( \ + const_cast<v8::From*>(that)))->Is##To()); \ return v8::internal::Handle<v8::internal::To>( \ reinterpret_cast<v8::internal::To**>(const_cast<v8::From*>(that))); \ } diff --git a/deps/v8/src/apinatives.js b/deps/v8/src/apinatives.js index 6431901bf..0579caf54 100644 --- a/deps/v8/src/apinatives.js +++ b/deps/v8/src/apinatives.js @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file contains infrastructure used by the API. See // v8natives.js for an explanation of these files are processed and @@ -71,31 +48,29 @@ function InstantiateFunction(data, name) { (serialNumber in cache) && (cache[serialNumber] != kUninitialized); if (!isFunctionCached) { try { - var fun = %CreateApiFunction(data); - if (name) %FunctionSetName(fun, name); var flags = %GetTemplateField(data, kApiFlagOffset); - var doNotCache = flags & (1 << kDoNotCacheBit); - if (!doNotCache) cache[serialNumber] = fun; - if (flags & (1 << kRemovePrototypeBit)) { - %FunctionRemovePrototype(fun); - } else { - var prototype = %GetTemplateField(data, kApiPrototypeTemplateOffset); - // Note: Do not directly use an object template as a condition, our - // internal ToBoolean doesn't handle that! - fun.prototype = typeof prototype === 'undefined' ? - {} : Instantiate(prototype); - if (flags & (1 << kReadOnlyPrototypeBit)) { - %FunctionSetReadOnlyPrototype(fun); - } - %SetProperty(fun.prototype, "constructor", fun, DONT_ENUM); + var has_proto = !(flags & (1 << kRemovePrototypeBit)); + var prototype; + if (has_proto) { + var template = %GetTemplateField(data, kApiPrototypeTemplateOffset); + prototype = typeof template === 'undefined' + ? {} : Instantiate(template); + var parent = %GetTemplateField(data, kApiParentTemplateOffset); // Note: Do not directly use a function template as a condition, our // internal ToBoolean doesn't handle that! - if (!(typeof parent === 'undefined')) { + if (typeof parent !== 'undefined') { var parent_fun = Instantiate(parent); - %SetPrototype(fun.prototype, parent_fun.prototype); + %SetPrototype(prototype, parent_fun.prototype); } } + var fun = %CreateApiFunction(data, prototype); + if (name) %FunctionSetName(fun, name); + var doNotCache = flags & (1 << kDoNotCacheBit); + if (!doNotCache) cache[serialNumber] = fun; + if (has_proto && flags & (1 << kReadOnlyPrototypeBit)) { + %FunctionSetReadOnlyPrototype(fun); + } ConfigureTemplateInstance(fun, data); if (doNotCache) return fun; } catch (e) { diff --git a/deps/v8/src/arguments.cc b/deps/v8/src/arguments.cc index 205da7c68..72420eea8 100644 --- a/deps/v8/src/arguments.cc +++ b/deps/v8/src/arguments.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" #include "arguments.h" diff --git a/deps/v8/src/arguments.h b/deps/v8/src/arguments.h index b7137c317..eb75724f2 100644 --- a/deps/v8/src/arguments.h +++ b/deps/v8/src/arguments.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
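A side note on the api.h hunk above: the EXTRA_CHECK in the generated Utils::OpenHandle helpers now verifies that the object behind an API handle really has the expected internal type (Is##To()) instead of merely checking that it is not a Failure. The toy program below mirrors the shape of that check with hypothetical stand-in types; it is not the real macro.

    #include <cassert>

    struct InternalObject {
      bool is_string;
      bool IsString() const { return is_string; }
    };

    struct ApiString { InternalObject* self; };  // models a v8::String handle

    InternalObject* OpenHandle(const ApiString* that, bool allow_empty = false) {
      assert(allow_empty || that != nullptr);
      // New-style check: the wrapped object must be of the type the macro was
      // instantiated with, not merely "not a failure sentinel".
      assert(that == nullptr || that->self->IsString());
      return that == nullptr ? nullptr : that->self;
    }

    int main() {
      InternalObject str{true};
      ApiString s{&str};
      InternalObject* internal = OpenHandle(&s);
      return internal->IsString() ? 0 : 1;
    }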
#ifndef V8_ARGUMENTS_H_ #define V8_ARGUMENTS_H_ @@ -299,10 +276,10 @@ double ClobberDoubleRegisters(double x1, double x2, double x3, double x4); #endif -#define DECLARE_RUNTIME_FUNCTION(Type, Name) \ -Type Name(int args_length, Object** args_object, Isolate* isolate) +#define DECLARE_RUNTIME_FUNCTION(Name) \ +Object* Name(int args_length, Object** args_object, Isolate* isolate) -#define RUNTIME_FUNCTION(Type, Name) \ +#define RUNTIME_FUNCTION_RETURNS_TYPE(Type, Name) \ static Type __RT_impl_##Name(Arguments args, Isolate* isolate); \ Type Name(int args_length, Object** args_object, Isolate* isolate) { \ CLOBBER_DOUBLE_REGISTERS(); \ @@ -311,6 +288,11 @@ Type Name(int args_length, Object** args_object, Isolate* isolate) { \ } \ static Type __RT_impl_##Name(Arguments args, Isolate* isolate) + +#define RUNTIME_FUNCTION(Name) RUNTIME_FUNCTION_RETURNS_TYPE(Object*, Name) +#define RUNTIME_FUNCTION_RETURN_PAIR(Name) \ + RUNTIME_FUNCTION_RETURNS_TYPE(ObjectPair, Name) + #define RUNTIME_ARGUMENTS(isolate, args) \ args.length(), args.arguments(), isolate diff --git a/deps/v8/src/arm/assembler-arm-inl.h b/deps/v8/src/arm/assembler-arm-inl.h index d966380c1..f5612e463 100644 --- a/deps/v8/src/arm/assembler-arm-inl.h +++ b/deps/v8/src/arm/assembler-arm-inl.h @@ -222,7 +222,7 @@ void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) { } -static const int kNoCodeAgeSequenceLength = 3; +static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize; Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) { @@ -234,15 +234,15 @@ Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) { Code* RelocInfo::code_age_stub() { ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE); return Code::GetCodeFromTargetAddress( - Memory::Address_at(pc_ + Assembler::kInstrSize * - (kNoCodeAgeSequenceLength - 1))); + Memory::Address_at(pc_ + + (kNoCodeAgeSequenceLength - Assembler::kInstrSize))); } void RelocInfo::set_code_age_stub(Code* stub) { ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE); - Memory::Address_at(pc_ + Assembler::kInstrSize * - (kNoCodeAgeSequenceLength - 1)) = + Memory::Address_at(pc_ + + (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) = stub->instruction_start(); } @@ -323,14 +323,12 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) { visitor->VisitExternalReference(this); } else if (RelocInfo::IsCodeAgeSequence(mode)) { visitor->VisitCodeAgeSequence(this); -#ifdef ENABLE_DEBUGGER_SUPPORT } else if (((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) || (RelocInfo::IsDebugBreakSlot(mode) && IsPatchedDebugBreakSlotSequence())) && isolate->debug()->has_break_points()) { visitor->VisitDebugTarget(this); -#endif } else if (RelocInfo::IsRuntimeEntry(mode)) { visitor->VisitRuntimeEntry(this); } @@ -350,14 +348,12 @@ void RelocInfo::Visit(Heap* heap) { StaticVisitor::VisitExternalReference(this); } else if (RelocInfo::IsCodeAgeSequence(mode)) { StaticVisitor::VisitCodeAgeSequence(heap, this); -#ifdef ENABLE_DEBUGGER_SUPPORT } else if (heap->isolate()->debug()->has_break_points() && ((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) || (RelocInfo::IsDebugBreakSlot(mode) && IsPatchedDebugBreakSlotSequence()))) { StaticVisitor::VisitDebugTarget(heap, this); -#endif } else if (RelocInfo::IsRuntimeEntry(mode)) { StaticVisitor::VisitRuntimeEntry(this); } diff --git a/deps/v8/src/arm/assembler-arm.cc b/deps/v8/src/arm/assembler-arm.cc index 297cdcc03..74fd61979 100644 --- a/deps/v8/src/arm/assembler-arm.cc +++ b/deps/v8/src/arm/assembler-arm.cc @@ 
-100,10 +100,11 @@ const char* DwVfpRegister::AllocationIndexToString(int index) { } -void CpuFeatures::Probe() { +void CpuFeatures::Probe(bool serializer_enabled) { uint64_t standard_features = static_cast<unsigned>( OS::CpuFeaturesImpliedByPlatform()) | CpuFeaturesImpliedByCompiler(); - ASSERT(supported_ == 0 || supported_ == standard_features); + ASSERT(supported_ == 0 || + (supported_ & standard_features) == standard_features); #ifdef DEBUG initialized_ = true; #endif @@ -113,10 +114,8 @@ void CpuFeatures::Probe() { // snapshot. supported_ |= standard_features; - if (Serializer::enabled()) { + if (serializer_enabled) { // No probing for features if we might serialize (generate snapshot). - printf(" "); - PrintFeatures(); return; } @@ -1077,15 +1076,11 @@ static bool fits_shifter(uint32_t imm32, // if they can be encoded in the ARM's 12 bits of immediate-offset instruction // space. There is no guarantee that the relocated location can be similarly // encoded. -bool Operand::must_output_reloc_info(const Assembler* assembler) const { +bool Operand::must_output_reloc_info(Isolate* isolate, + const Assembler* assembler) const { if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { -#ifdef DEBUG - if (!Serializer::enabled()) { - Serializer::TooLateToEnableNow(); - } -#endif // def DEBUG if (assembler != NULL && assembler->predictable_code_size()) return true; - return Serializer::enabled(); + return Serializer::enabled(isolate); } else if (RelocInfo::IsNone(rmode_)) { return false; } @@ -1093,7 +1088,8 @@ bool Operand::must_output_reloc_info(const Assembler* assembler) const { } -static bool use_mov_immediate_load(const Operand& x, +static bool use_mov_immediate_load(Isolate* isolate, + const Operand& x, const Assembler* assembler) { if (assembler != NULL && !assembler->can_use_constant_pool()) { // If there is no constant pool available, we must use an mov immediate. @@ -1104,7 +1100,7 @@ static bool use_mov_immediate_load(const Operand& x, (assembler == NULL || !assembler->predictable_code_size())) { // Prefer movw / movt to constant pool if it is more efficient on the CPU. return true; - } else if (x.must_output_reloc_info(assembler)) { + } else if (x.must_output_reloc_info(isolate, assembler)) { // Prefer constant pool if data is likely to be patched. return false; } else { @@ -1114,17 +1110,18 @@ static bool use_mov_immediate_load(const Operand& x, } -bool Operand::is_single_instruction(const Assembler* assembler, +bool Operand::is_single_instruction(Isolate* isolate, + const Assembler* assembler, Instr instr) const { if (rm_.is_valid()) return true; uint32_t dummy1, dummy2; - if (must_output_reloc_info(assembler) || + if (must_output_reloc_info(isolate, assembler) || !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { // The immediate operand cannot be encoded as a shifter operand, or use of // constant pool is required. For a mov instruction not setting the // condition code additional instruction conventions can be used. if ((instr & ~kCondMask) == 13*B21) { // mov, S not set - return !use_mov_immediate_load(*this, assembler); + return !use_mov_immediate_load(isolate, *this, assembler); } else { // If this is not a mov or mvn instruction there will always an additional // instructions - either mov or ldr. 
The mov might actually be two @@ -1144,15 +1141,16 @@ void Assembler::move_32_bit_immediate(Register rd, const Operand& x, Condition cond) { RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL); - if (x.must_output_reloc_info(this)) { + if (x.must_output_reloc_info(isolate(), this)) { RecordRelocInfo(rinfo); } - if (use_mov_immediate_load(x, this)) { + if (use_mov_immediate_load(isolate(), x, this)) { Register target = rd.code() == pc.code() ? ip : rd; // TODO(rmcilroy): add ARMv6 support for immediate loads. ASSERT(CpuFeatures::IsSupported(ARMv7)); - if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) { + if (!FLAG_enable_ool_constant_pool && + x.must_output_reloc_info(isolate(), this)) { // Make sure the movw/movt doesn't get separated. BlockConstPoolFor(2); } @@ -1180,7 +1178,7 @@ void Assembler::addrmod1(Instr instr, // Immediate. uint32_t rotate_imm; uint32_t immed_8; - if (x.must_output_reloc_info(this) || + if (x.must_output_reloc_info(isolate(), this) || !fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) { // The immediate operand cannot be encoded as a shifter operand, so load // it first to register ip and change the original instruction to use ip. @@ -1862,7 +1860,7 @@ void Assembler::msr(SRegisterFieldMask fields, const Operand& src, // Immediate. uint32_t rotate_imm; uint32_t immed_8; - if (src.must_output_reloc_info(this) || + if (src.must_output_reloc_info(isolate(), this) || !fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) { // Immediate operand cannot be encoded, load it first to register ip. move_32_bit_immediate(ip, src); @@ -2827,8 +2825,9 @@ void Assembler::vcvt_f64_s32(const DwVfpRegister dst, ASSERT(CpuFeatures::IsSupported(VFP3)); int vd, d; dst.split_code(&vd, &d); - int i = ((32 - fraction_bits) >> 4) & 1; - int imm4 = (32 - fraction_bits) & 0xf; + int imm5 = 32 - fraction_bits; + int i = imm5 & 1; + int imm4 = (imm5 >> 1) & 0xf; emit(cond | 0xE*B24 | B23 | d*B22 | 0x3*B20 | B19 | 0x2*B16 | vd*B12 | 0x5*B9 | B8 | B7 | B6 | i*B5 | imm4); } @@ -3161,9 +3160,7 @@ void Assembler::RecordComment(const char* msg) { void Assembler::RecordConstPool(int size) { // We only need this for debugger support, to correctly compute offsets in the // code. -#ifdef ENABLE_DEBUGGER_SUPPORT RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size)); -#endif } @@ -3266,12 +3263,7 @@ void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { if (!RelocInfo::IsNone(rinfo.rmode())) { // Don't record external references unless the heap will be serialized. 
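The vcvt_f64_s32 hunk above is a genuine encoding fix: the 5-bit fixed-point immediate imm5 = 32 - fraction_bits is now split so that bit 0 goes into the instruction's i field and bits 4:1 into imm4, whereas the old code took bit 4 and bits 3:0 and therefore emitted the wrong fraction for most values. A standalone comparison of the two splits:

    #include <cstdio>

    struct Split { int i; int imm4; };

    Split OldSplit(int fraction_bits) {   // previous (incorrect) field split
      return { ((32 - fraction_bits) >> 4) & 1, (32 - fraction_bits) & 0xf };
    }

    Split NewSplit(int fraction_bits) {   // field split after this patch
      int imm5 = 32 - fraction_bits;
      return { imm5 & 1, (imm5 >> 1) & 0xf };
    }

    int main() {
      for (int fraction_bits = 1; fraction_bits <= 32; ++fraction_bits) {
        Split o = OldSplit(fraction_bits);
        Split n = NewSplit(fraction_bits);
        if (o.i != n.i || o.imm4 != n.imm4) {
          std::printf("fraction_bits=%2d old(i=%d,imm4=%2d) new(i=%d,imm4=%2d)\n",
                      fraction_bits, o.i, o.imm4, n.i, n.imm4);
        }
      }
      return 0;
    }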
if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE) { -#ifdef DEBUG - if (!Serializer::enabled()) { - Serializer::TooLateToEnableNow(); - } -#endif - if (!Serializer::enabled() && !emit_debug_code()) { + if (!Serializer::enabled(isolate()) && !emit_debug_code()) { return; } } @@ -3502,7 +3494,8 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { // data bool found = false; - if (!Serializer::enabled() && (rinfo.rmode() >= RelocInfo::CELL)) { + if (!Serializer::enabled(isolate()) && + (rinfo.rmode() >= RelocInfo::CELL)) { for (int j = 0; j < i; j++) { RelocInfo& rinfo2 = pending_32_bit_reloc_info_[j]; @@ -3547,14 +3540,15 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { } -MaybeObject* Assembler::AllocateConstantPool(Heap* heap) { - ASSERT(FLAG_enable_ool_constant_pool); - return constant_pool_builder_.Allocate(heap); +Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { + if (!FLAG_enable_ool_constant_pool) { + return isolate->factory()->empty_constant_pool_array(); + } + return constant_pool_builder_.New(isolate); } void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { - ASSERT(FLAG_enable_ool_constant_pool); constant_pool_builder_.Populate(this, constant_pool); } @@ -3605,7 +3599,7 @@ void ConstantPoolBuilder::AddEntry(Assembler* assm, // Try to merge entries which won't be patched. int merged_index = -1; if (RelocInfo::IsNone(rmode) || - (!Serializer::enabled() && (rmode >= RelocInfo::CELL))) { + (!Serializer::enabled(assm->isolate()) && (rmode >= RelocInfo::CELL))) { size_t i; std::vector<RelocInfo>::const_iterator it; for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) { @@ -3654,12 +3648,14 @@ void ConstantPoolBuilder::Relocate(int pc_delta) { } -MaybeObject* ConstantPoolBuilder::Allocate(Heap* heap) { +Handle<ConstantPoolArray> ConstantPoolBuilder::New(Isolate* isolate) { if (IsEmpty()) { - return heap->empty_constant_pool_array(); + return isolate->factory()->empty_constant_pool_array(); } else { - return heap->AllocateConstantPoolArray(count_of_64bit_, count_of_code_ptr_, - count_of_heap_ptr_, count_of_32bit_); + return isolate->factory()->NewConstantPoolArray(count_of_64bit_, + count_of_code_ptr_, + count_of_heap_ptr_, + count_of_32bit_); } } diff --git a/deps/v8/src/arm/assembler-arm.h b/deps/v8/src/arm/assembler-arm.h index 727b05421..1c6a7f04f 100644 --- a/deps/v8/src/arm/assembler-arm.h +++ b/deps/v8/src/arm/assembler-arm.h @@ -56,7 +56,7 @@ class CpuFeatures : public AllStatic { public: // Detect features of the target CPU. Set safe defaults if the serializer // is enabled (snapshots must be portable). - static void Probe(); + static void Probe(bool serializer_enabled); // Display target use when compiling. 
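The CpuFeatures changes above (Probe(bool serializer_enabled), IsSafeForSnapshot(Isolate*, CpuFeature)) all enforce one rule: a feature that was only found by runtime probing must not be relied on while a snapshot is being serialized, since the snapshot has to run on machines where that probe could fail. A minimal standalone model of the predicate, using local stand-in parameters rather than the real classes:

    #include <cstdio>

    bool IsSafeForSnapshot(bool supported, bool found_by_runtime_probing_only,
                           bool serializer_enabled, bool cross_compile) {
      return cross_compile ||
             (supported &&
              !(serializer_enabled && found_by_runtime_probing_only));
    }

    int main() {
      // A probed-only feature is usable in a normal build, but is masked out
      // while generating a snapshot.
      std::printf("normal build: %d\n",
                  IsSafeForSnapshot(true, true, false, false));   // 1
      std::printf("serializing : %d\n",
                  IsSafeForSnapshot(true, true, true, false));    // 0
      return 0;
    }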
static void PrintTarget(); @@ -70,15 +70,11 @@ class CpuFeatures : public AllStatic { return Check(f, supported_); } - static bool IsFoundByRuntimeProbingOnly(CpuFeature f) { - ASSERT(initialized_); - return Check(f, found_by_runtime_probing_only_); - } - - static bool IsSafeForSnapshot(CpuFeature f) { + static bool IsSafeForSnapshot(Isolate* isolate, CpuFeature f) { return Check(f, cross_compile_) || (IsSupported(f) && - (!Serializer::enabled() || !IsFoundByRuntimeProbingOnly(f))); + !(Serializer::enabled(isolate) && + Check(f, found_by_runtime_probing_only_))); } static unsigned cache_line_size() { return cache_line_size_; } @@ -93,6 +89,8 @@ class CpuFeatures : public AllStatic { (cross_compile_ & mask) == mask; } + static bool SupportsCrankshaft() { return CpuFeatures::IsSupported(VFP3); } + private: static bool Check(CpuFeature f, unsigned set) { return (set & flag2set(f)) != 0; @@ -590,8 +588,11 @@ class Operand BASE_EMBEDDED { // the instruction this operand is used for is a MOV or MVN instruction the // actual instruction to use is required for this calculation. For other // instructions instr is ignored. - bool is_single_instruction(const Assembler* assembler, Instr instr = 0) const; - bool must_output_reloc_info(const Assembler* assembler) const; + bool is_single_instruction(Isolate* isolate, + const Assembler* assembler, + Instr instr = 0) const; + bool must_output_reloc_info(Isolate* isolate, + const Assembler* assembler) const; inline int32_t immediate() const { ASSERT(!rm_.is_valid()); @@ -714,7 +715,7 @@ class ConstantPoolBuilder BASE_EMBEDDED { void AddEntry(Assembler* assm, const RelocInfo& rinfo); void Relocate(int pc_delta); bool IsEmpty(); - MaybeObject* Allocate(Heap* heap); + Handle<ConstantPoolArray> New(Isolate* isolate); void Populate(Assembler* assm, ConstantPoolArray* constant_pool); inline int count_of_64bit() const { return count_of_64bit_; } @@ -728,6 +729,8 @@ class ConstantPoolBuilder BASE_EMBEDDED { bool IsCodePtrEntry(RelocInfo::Mode rmode); bool IsHeapPtrEntry(RelocInfo::Mode rmode); + // TODO(rmcilroy): This should ideally be a ZoneList, however that would mean + // RelocInfo would need to subclass ZoneObject which it currently doesn't. std::vector<RelocInfo> entries_; std::vector<int> merged_indexes_; int count_of_64bit_; @@ -1498,7 +1501,7 @@ class Assembler : public AssemblerBase { void CheckConstPool(bool force_emit, bool require_jump); // Allocate a constant pool of the correct size for the generated code. - MaybeObject* AllocateConstantPool(Heap* heap); + Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate); // Generate the constant pool for the generated code. void PopulateConstantPool(ConstantPoolArray* constant_pool); diff --git a/deps/v8/src/arm/builtins-arm.cc b/deps/v8/src/arm/builtins-arm.cc index f13814641..2e5cc7398 100644 --- a/deps/v8/src/arm/builtins-arm.cc +++ b/deps/v8/src/arm/builtins-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -376,14 +353,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, Label rt_call, allocated; if (FLAG_inline_new) { Label undo_allocation; -#ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference debug_step_in_fp = ExternalReference::debug_step_in_fp_address(isolate); __ mov(r2, Operand(debug_step_in_fp)); __ ldr(r2, MemOperand(r2)); __ tst(r2, r2); __ b(ne, &rt_call); -#endif // Load the initial map and verify that it is in fact a map. // r1: constructor function @@ -807,7 +782,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, if (is_construct) { // No type feedback cell is available __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); - CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); + CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); __ CallStub(&stub); } else { ParameterCount actual(r0); @@ -923,7 +898,7 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); // Jump to point after the code-age stub. - __ add(r0, r0, Operand(kNoCodeAgeSequenceLength * Assembler::kInstrSize)); + __ add(r0, r0, Operand(kNoCodeAgeSequenceLength)); __ mov(pc, r0); } @@ -1284,7 +1259,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { // Out of stack space. __ ldr(r1, MemOperand(fp, kFunctionOffset)); __ Push(r1, r0); - __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); // End of stack check. // Push current limit and index. @@ -1407,6 +1382,26 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { } +static void ArgumentAdaptorStackCheck(MacroAssembler* masm, + Label* stack_overflow) { + // ----------- S t a t e ------------- + // -- r0 : actual number of arguments + // -- r1 : function (passed through to callee) + // -- r2 : expected number of arguments + // ----------------------------------- + // Check the stack for overflow. We are not trying to catch + // interruptions (e.g. debug break and preemption) here, so the "real stack + // limit" is checked. + __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); + // Make r5 the space we have left. The stack might already be overflowed + // here which will cause r5 to become negative. + __ sub(r5, sp, r5); + // Check if the arguments will overflow the stack. + __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2)); + __ b(le, stack_overflow); // Signed comparison. 
+} + + static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { __ SmiTag(r0); __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); @@ -1446,6 +1441,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // -- r2 : expected number of arguments // ----------------------------------- + Label stack_overflow; + ArgumentAdaptorStackCheck(masm, &stack_overflow); Label invoke, dont_adapt_arguments; Label enough, too_few; @@ -1545,6 +1542,14 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // ------------------------------------------- __ bind(&dont_adapt_arguments); __ Jump(r3); + + __ bind(&stack_overflow); + { + FrameScope frame(masm, StackFrame::MANUAL); + EnterArgumentsAdaptorFrame(masm); + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); + __ bkpt(0); + } } diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc index 832296b27..7b2935106 100644 --- a/deps/v8/src/arm/code-stubs-arm.cc +++ b/deps/v8/src/arm/code-stubs-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
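The builtins-arm.cc hunks above add ArgumentAdaptorStackCheck and route overflow through the new Builtins::STACK_OVERFLOW builtin: before the adaptor frame copies the expected number of arguments, it checks that they fit between sp and the real stack limit, using a signed comparison because the stack may already be past the limit. A standalone model of that arithmetic with made-up sample addresses; the pointer size of 4 matches the ARM port:

    #include <cstdint>
    #include <cstdio>

    bool AdaptorWouldOverflow(uintptr_t sp, uintptr_t real_stack_limit,
                              uint32_t expected_args, uint32_t pointer_size = 4) {
      // r5 = sp - limit; may already be "negative" (hence the signed compare
      // in the generated code), in which case any argument count overflows.
      intptr_t remaining = static_cast<intptr_t>(sp - real_stack_limit);
      return remaining <= static_cast<intptr_t>(expected_args) * pointer_size;
    }

    int main() {
      std::printf("%d\n", AdaptorWouldOverflow(0x1000, 0x0F00, 8));  // 0: fits
      std::printf("%d\n", AdaptorWouldOverflow(0x1000, 0x0FF0, 8));  // 1: overflow
      return 0;
    }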
#include "v8.h" @@ -39,7 +16,6 @@ namespace internal { void FastNewClosureStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r2 }; descriptor->register_param_count_ = 1; @@ -50,7 +26,6 @@ void FastNewClosureStub::InitializeInterfaceDescriptor( void FastNewContextStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r1 }; descriptor->register_param_count_ = 1; @@ -60,7 +35,6 @@ void FastNewContextStub::InitializeInterfaceDescriptor( void ToNumberStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r0 }; descriptor->register_param_count_ = 1; @@ -70,7 +44,6 @@ void ToNumberStub::InitializeInterfaceDescriptor( void NumberToStringStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r0 }; descriptor->register_param_count_ = 1; @@ -81,7 +54,6 @@ void NumberToStringStub::InitializeInterfaceDescriptor( void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r3, r2, r1 }; descriptor->register_param_count_ = 3; @@ -93,7 +65,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r3, r2, r1, r0 }; descriptor->register_param_count_ = 4; @@ -104,7 +75,6 @@ void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( void CreateAllocationSiteStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r2, r3 }; descriptor->register_param_count_ = 2; @@ -114,7 +84,6 @@ void CreateAllocationSiteStub::InitializeInterfaceDescriptor( void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r1, r0 }; descriptor->register_param_count_ = 2; @@ -125,7 +94,6 @@ void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r1, r0 }; descriptor->register_param_count_ = 2; @@ -136,7 +104,6 @@ void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( void RegExpConstructResultStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r2, r1, r0 }; descriptor->register_param_count_ = 3; @@ -147,7 +114,6 @@ void RegExpConstructResultStub::InitializeInterfaceDescriptor( void LoadFieldStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r0 }; descriptor->register_param_count_ = 1; @@ -157,7 +123,6 @@ void LoadFieldStub::InitializeInterfaceDescriptor( void KeyedLoadFieldStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r1 }; descriptor->register_param_count_ = 1; @@ -167,7 +132,6 @@ void KeyedLoadFieldStub::InitializeInterfaceDescriptor( void StringLengthStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register 
registers[] = { r0, r2 }; descriptor->register_param_count_ = 2; @@ -177,7 +141,6 @@ void StringLengthStub::InitializeInterfaceDescriptor( void KeyedStringLengthStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r1, r0 }; descriptor->register_param_count_ = 2; @@ -187,7 +150,6 @@ void KeyedStringLengthStub::InitializeInterfaceDescriptor( void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r2, r1, r0 }; descriptor->register_param_count_ = 3; @@ -198,7 +160,6 @@ void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( void TransitionElementsKindStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r0, r1 }; descriptor->register_param_count_ = 2; @@ -210,7 +171,6 @@ void TransitionElementsKindStub::InitializeInterfaceDescriptor( void CompareNilICStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r0 }; descriptor->register_param_count_ = 1; @@ -218,12 +178,11 @@ void CompareNilICStub::InitializeInterfaceDescriptor( descriptor->deoptimization_handler_ = FUNCTION_ADDR(CompareNilIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate())); } static void InitializeArrayConstructorDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor, int constant_stack_parameter_count) { // register state @@ -252,7 +211,6 @@ static void InitializeArrayConstructorDescriptor( static void InitializeInternalArrayConstructorDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor, int constant_stack_parameter_count) { // register state @@ -280,28 +238,24 @@ static void InitializeInternalArrayConstructorDescriptor( void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, 0); + InitializeArrayConstructorDescriptor(descriptor, 0); } void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, 1); + InitializeArrayConstructorDescriptor(descriptor, 1); } void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, -1); + InitializeArrayConstructorDescriptor(descriptor, -1); } void ToBooleanStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r0 }; descriptor->register_param_count_ = 1; @@ -309,33 +263,29 @@ void ToBooleanStub::InitializeInterfaceDescriptor( descriptor->deoptimization_handler_ = FUNCTION_ADDR(ToBooleanIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate())); } void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0); + InitializeInternalArrayConstructorDescriptor(descriptor, 0); } void 
InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1); + InitializeInternalArrayConstructorDescriptor(descriptor, 1); } void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1); + InitializeInternalArrayConstructorDescriptor(descriptor, -1); } void StoreGlobalStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r1, r2, r0 }; descriptor->register_param_count_ = 3; @@ -346,7 +296,6 @@ void StoreGlobalStub::InitializeInterfaceDescriptor( void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r0, r3, r1, r2 }; descriptor->register_param_count_ = 4; @@ -357,19 +306,17 @@ void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( void BinaryOpICStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r1, r0 }; descriptor->register_param_count_ = 2; descriptor->register_params_ = registers; descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate())); } void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r2, r1, r0 }; descriptor->register_param_count_ = 3; @@ -380,7 +327,6 @@ void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor( void StringAddStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { r1, r0 }; descriptor->register_param_count_ = 2; @@ -504,10 +450,9 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { // Update the static counter each time a new code stub is generated. - Isolate* isolate = masm->isolate(); - isolate->counters()->code_stubs()->Increment(); + isolate()->counters()->code_stubs()->Increment(); - CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); + CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(); int param_count = descriptor->register_param_count_; { // Call the runtime system in a fresh internal frame. @@ -533,11 +478,13 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { // stub so you don't have to set up the frame. 
class ConvertToDoubleStub : public PlatformCodeStub { public: - ConvertToDoubleStub(Register result_reg_1, + ConvertToDoubleStub(Isolate* isolate, + Register result_reg_1, Register result_reg_2, Register source_reg, Register scratch_reg) - : result1_(result_reg_1), + : PlatformCodeStub(isolate), + result1_(result_reg_1), result2_(result_reg_2), source_(source_reg), zeros_(scratch_reg) { } @@ -726,10 +673,10 @@ void DoubleToIStub::Generate(MacroAssembler* masm) { void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime( Isolate* isolate) { - WriteInt32ToHeapNumberStub stub1(r1, r0, r2); - WriteInt32ToHeapNumberStub stub2(r2, r0, r3); - stub1.GetCode(isolate); - stub2.GetCode(isolate); + WriteInt32ToHeapNumberStub stub1(isolate, r1, r0, r2); + WriteInt32ToHeapNumberStub stub2(isolate, r2, r0, r3); + stub1.GetCode(); + stub2.GetCode(); } @@ -1124,7 +1071,6 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { __ bind(&both_loaded_as_doubles); // The arguments have been converted to doubles and stored in d6 and d7, if // VFP3 is supported, or in r0, r1, r2, and r3. - Isolate* isolate = masm->isolate(); __ bind(&lhs_not_nan); Label no_nan; // ARMv7 VFP3 instructions to implement double precision comparison. @@ -1187,7 +1133,8 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, r2, r3, &slow); - __ IncrementCounter(isolate->counters()->string_compare_native(), 1, r2, r3); + __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2, + r3); if (cc == eq) { StringCompareStub::GenerateFlatAsciiStringEquals(masm, lhs, @@ -1251,9 +1198,9 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { AllowExternalCallThatCantCauseGC scope(masm); __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); - __ mov(r0, Operand(ExternalReference::isolate_address(masm->isolate()))); + __ mov(r0, Operand(ExternalReference::isolate_address(isolate()))); __ CallCFunction( - ExternalReference::store_buffer_overflow_function(masm->isolate()), + ExternalReference::store_buffer_overflow_function(isolate()), argument_count); if (save_doubles_ == kSaveFPRegs) { __ RestoreFPRegs(sp, scratch); @@ -1373,7 +1320,7 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ PrepareCallCFunction(0, 2, scratch); __ MovToFloatParameters(double_base, double_exponent); __ CallCFunction( - ExternalReference::power_double_double_function(masm->isolate()), + ExternalReference::power_double_double_function(isolate()), 0, 2); } __ pop(lr); @@ -1424,11 +1371,11 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ vcvt_f64_s32(double_exponent, single_scratch); // Returning or bailing out. - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); if (exponent_type_ == ON_STACK) { // The arguments are still on the stack. __ bind(&call_runtime); - __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); + __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); // The stub is called from non-optimized code, which expects the result // as heap number in exponent. 
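The code-stubs-arm.cc hunks above follow one refactoring pattern: every stub constructor now takes an Isolate* and forwards it to PlatformCodeStub, so InitializeInterfaceDescriptor(), GetCode() and the generators can call isolate() instead of receiving masm->isolate() or an explicit parameter. A minimal sketch of that shape, using simplified stand-in class names rather than the real V8 hierarchy, is shown below.

// Editor's sketch, not part of the patch: storing the per-stub context in the
// base-class constructor so member functions no longer take it as a parameter.
#include <iostream>
#include <string>

struct Isolate {
  std::string name;
};

class CodeStubBase {
 public:
  explicit CodeStubBase(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() const { return isolate_; }
  // Previously a GetCode(Isolate*) overload; now the stored pointer is used.
  std::string GetCode() const { return "code for " + isolate()->name; }

 private:
  Isolate* isolate_;
};

class ConvertToDoubleLikeStub : public CodeStubBase {
 public:
  // Mirrors constructors such as ConvertToDoubleStub(Isolate*, ...) gaining an
  // Isolate* first argument and forwarding it to the base class.
  ConvertToDoubleLikeStub(Isolate* isolate, int result_reg)
      : CodeStubBase(isolate), result_reg_(result_reg) {}
  int result_reg() const { return result_reg_; }

 private:
  int result_reg_;
};

int main() {
  Isolate iso{"main"};
  ConvertToDoubleLikeStub stub(&iso, 0);
  std::cout << stub.GetCode() << "\n";  // no isolate argument needed any more
}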
@@ -1447,7 +1394,7 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ PrepareCallCFunction(0, 2, scratch); __ MovToFloatParameters(double_base, double_exponent); __ CallCFunction( - ExternalReference::power_double_double_function(masm->isolate()), + ExternalReference::power_double_double_function(isolate()), 0, 2); } __ pop(lr); @@ -1479,61 +1426,57 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { void CodeStub::GenerateFPStubs(Isolate* isolate) { SaveFPRegsMode mode = kSaveFPRegs; - CEntryStub save_doubles(1, mode); - StoreBufferOverflowStub stub(mode); + CEntryStub save_doubles(isolate, 1, mode); + StoreBufferOverflowStub stub(isolate, mode); // These stubs might already be in the snapshot, detect that and don't // regenerate, which would lead to code stub initialization state being messed // up. Code* save_doubles_code; - if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { - save_doubles_code = *save_doubles.GetCode(isolate); + if (!save_doubles.FindCodeInCache(&save_doubles_code)) { + save_doubles_code = *save_doubles.GetCode(); } Code* store_buffer_overflow_code; - if (!stub.FindCodeInCache(&store_buffer_overflow_code, isolate)) { - store_buffer_overflow_code = *stub.GetCode(isolate); + if (!stub.FindCodeInCache(&store_buffer_overflow_code)) { + store_buffer_overflow_code = *stub.GetCode(); } isolate->set_fp_stubs_generated(true); } void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { - CEntryStub stub(1, kDontSaveFPRegs); - stub.GetCode(isolate); + CEntryStub stub(isolate, 1, kDontSaveFPRegs); + stub.GetCode(); } -void CEntryStub::GenerateCore(MacroAssembler* masm, - Label* throw_normal_exception, - Label* throw_termination_exception, - bool do_gc, - bool always_allocate) { - // r0: result parameter for PerformGC, if any - // r4: number of arguments including receiver (C callee-saved) - // r5: pointer to builtin function (C callee-saved) - // r6: pointer to the first argument (C callee-saved) - Isolate* isolate = masm->isolate(); - - if (do_gc) { - // Passing r0. - __ PrepareCallCFunction(2, 0, r1); - __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate()))); - __ CallCFunction(ExternalReference::perform_gc_function(isolate), - 2, 0); - } +void CEntryStub::Generate(MacroAssembler* masm) { + // Called from JavaScript; parameters are on stack as if calling JS function. + // r0: number of arguments including receiver + // r1: pointer to builtin function + // fp: frame pointer (restored after C call) + // sp: stack pointer (restored as callee's sp after C call) + // cp: current context (C callee-saved) - ExternalReference scope_depth = - ExternalReference::heap_always_allocate_scope_depth(isolate); - if (always_allocate) { - __ mov(r0, Operand(scope_depth)); - __ ldr(r1, MemOperand(r0)); - __ add(r1, r1, Operand(1)); - __ str(r1, MemOperand(r0)); - } + ProfileEntryHookStub::MaybeCallEntryHook(masm); - // Call C built-in. - // r0 = argc, r1 = argv - __ mov(r0, Operand(r4)); - __ mov(r1, Operand(r6)); + __ mov(r5, Operand(r1)); + + // Compute the argv pointer in a callee-saved register. + __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2)); + __ sub(r1, r1, Operand(kPointerSize)); + + // Enter the exit frame that transitions from JavaScript to C++. + FrameScope scope(masm, StackFrame::MANUAL); + __ EnterExitFrame(save_doubles_); + + // Store a copy of argc in callee-saved registers for later. 
+ __ mov(r4, Operand(r0)); + + // r0, r4: number of arguments including receiver (C callee-saved) + // r1: pointer to the first argument (C callee-saved) + // r5: pointer to builtin function (C callee-saved) + + // Result returned in r0 or r0+r1 by default. #if V8_HOST_ARCH_ARM int frame_alignment = MacroAssembler::ActivationFrameAlignment(); @@ -1551,7 +1494,9 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, } #endif - __ mov(r2, Operand(ExternalReference::isolate_address(isolate))); + // Call C built-in. + // r0 = argc, r1 = argv + __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); // To let the GC traverse the return address of the exit frames, we need to // know where the return address is. The CEntryStub is unmovable, so @@ -1570,132 +1515,67 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ VFPEnsureFPSCRState(r2); - if (always_allocate) { - // It's okay to clobber r2 and r3 here. Don't mess with r0 and r1 - // though (contain the result). - __ mov(r2, Operand(scope_depth)); - __ ldr(r3, MemOperand(r2)); - __ sub(r3, r3, Operand(1)); - __ str(r3, MemOperand(r2)); + // Runtime functions should not return 'the hole'. Allowing it to escape may + // lead to crashes in the IC code later. + if (FLAG_debug_code) { + Label okay; + __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); + __ b(ne, &okay); + __ stop("The hole escaped"); + __ bind(&okay); } - // check for failure result - Label failure_returned; - STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); - // Lower 2 bits of r2 are 0 iff r0 has failure tag. - __ add(r2, r0, Operand(1)); - __ tst(r2, Operand(kFailureTagMask)); - __ b(eq, &failure_returned); + // Check result for exception sentinel. + Label exception_returned; + __ CompareRoot(r0, Heap::kExceptionRootIndex); + __ b(eq, &exception_returned); + + ExternalReference pending_exception_address( + Isolate::kPendingExceptionAddress, isolate()); + + // Check that there is no pending exception, otherwise we + // should have returned the exception sentinel. + if (FLAG_debug_code) { + Label okay; + __ mov(r2, Operand(pending_exception_address)); + __ ldr(r2, MemOperand(r2)); + __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); + // Cannot use check here as it attempts to generate call into runtime. + __ b(eq, &okay); + __ stop("Unexpected pending exception"); + __ bind(&okay); + } // Exit C frame and return. // r0:r1: result // sp: stack pointer // fp: frame pointer - // Callee-saved register r4 still holds argc. + // Callee-saved register r4 still holds argc. __ LeaveExitFrame(save_doubles_, r4, true); __ mov(pc, lr); - // check if we should retry or throw exception - Label retry; - __ bind(&failure_returned); - STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); - __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); - __ b(eq, &retry); + // Handling of exception. + __ bind(&exception_returned); // Retrieve the pending exception. - __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); - __ ldr(r0, MemOperand(ip)); + __ mov(r2, Operand(pending_exception_address)); + __ ldr(r0, MemOperand(r2)); // Clear the pending exception. __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); - __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); - __ str(r3, MemOperand(ip)); + __ str(r3, MemOperand(r2)); // Special handling of termination exceptions which are uncatchable // by javascript code. 
- __ LoadRoot(r3, Heap::kTerminationExceptionRootIndex); - __ cmp(r0, r3); - __ b(eq, throw_termination_exception); - - // Handle normal exception. - __ jmp(throw_normal_exception); - - __ bind(&retry); // pass last failure (r0) as parameter (r0) when retrying -} - - -void CEntryStub::Generate(MacroAssembler* masm) { - // Called from JavaScript; parameters are on stack as if calling JS function - // r0: number of arguments including receiver - // r1: pointer to builtin function - // fp: frame pointer (restored after C call) - // sp: stack pointer (restored as callee's sp after C call) - // cp: current context (C callee-saved) - - ProfileEntryHookStub::MaybeCallEntryHook(masm); - - // Result returned in r0 or r0+r1 by default. - - // NOTE: Invocations of builtins may return failure objects - // instead of a proper result. The builtin entry handles - // this by performing a garbage collection and retrying the - // builtin once. - - // Compute the argv pointer in a callee-saved register. - __ add(r6, sp, Operand(r0, LSL, kPointerSizeLog2)); - __ sub(r6, r6, Operand(kPointerSize)); - - // Enter the exit frame that transitions from JavaScript to C++. - FrameAndConstantPoolScope scope(masm, StackFrame::MANUAL); - __ EnterExitFrame(save_doubles_); - - // Set up argc and the builtin function in callee-saved registers. - __ mov(r4, Operand(r0)); - __ mov(r5, Operand(r1)); - - // r4: number of arguments (C callee-saved) - // r5: pointer to builtin function (C callee-saved) - // r6: pointer to first argument (C callee-saved) - - Label throw_normal_exception; Label throw_termination_exception; + __ CompareRoot(r0, Heap::kTerminationExceptionRootIndex); + __ b(eq, &throw_termination_exception); - // Call into the runtime system. - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - false, - false); - - // Do space-specific GC and retry runtime call. - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - true, - false); - - // Do full GC and retry runtime call one final time. - Failure* failure = Failure::InternalError(); - __ mov(r0, Operand(reinterpret_cast<int32_t>(failure))); - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - true, - true); - - { FrameScope scope(masm, StackFrame::MANUAL); - __ PrepareCallCFunction(0, r0); - __ CallCFunction( - ExternalReference::out_of_memory_function(masm->isolate()), 0, 0); - } + // Handle normal exception. + __ Throw(r0); __ bind(&throw_termination_exception); __ ThrowUncatchable(r0); - - __ bind(&throw_normal_exception); - __ Throw(r0); } @@ -1738,15 +1618,14 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // r2: receiver // r3: argc // r4: argv - Isolate* isolate = masm->isolate(); int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; if (FLAG_enable_ool_constant_pool) { - __ mov(r8, Operand(isolate->factory()->empty_constant_pool_array())); + __ mov(r8, Operand(isolate()->factory()->empty_constant_pool_array())); } __ mov(r7, Operand(Smi::FromInt(marker))); __ mov(r6, Operand(Smi::FromInt(marker))); __ mov(r5, - Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate))); + Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); __ ldr(r5, MemOperand(r5)); __ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used. 
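The CEntryStub hunks above replace the old GenerateCore loop, which decoded failure-tagged return values and retried the runtime call with increasingly aggressive GC, with a single call that compares the result against the exception sentinel root, then retrieves and clears the pending exception before throwing. The sketch below models only that control flow; the Isolate struct, sentinel constants and RuntimeDivide function are invented stand-ins, not V8 definitions.

// Editor's sketch, not part of the patch: sentinel-based exception return
// instead of failure tags and retry-with-GC.
#include <cstdint>
#include <cstdio>
#include <optional>

using Object = std::intptr_t;
constexpr Object kException = -1;  // stands in for Heap::kExceptionRootIndex
constexpr Object kTheHole = -2;    // stands in for the-hole value

struct Isolate {
  Object pending_exception = kTheHole;
};

Object RuntimeDivide(Isolate* iso, Object a, Object b) {
  if (b == 0) {
    iso->pending_exception = 42;  // some exception object
    return kException;            // nothing else is encoded in the result
  }
  return a / b;
}

std::optional<Object> CallRuntime(Isolate* iso, Object a, Object b) {
  Object result = RuntimeDivide(iso, a, b);
  if (result != kException) return result;   // normal return, no tag decoding
  Object pending = iso->pending_exception;    // retrieve the pending exception
  iso->pending_exception = kTheHole;          // clear it, as the stub now does
  std::printf("throwing %ld\n", static_cast<long>(pending));
  return std::nullopt;
}

int main() {
  Isolate iso;
  if (auto r = CallRuntime(&iso, 10, 2))
    std::printf("result %ld\n", static_cast<long>(*r));
  CallRuntime(&iso, 1, 0);  // takes the exception path
}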
__ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | @@ -1758,7 +1637,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // If this is the outermost JS call, set js_entry_sp value. Label non_outermost_js; - ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); + ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate()); __ mov(r5, Operand(ExternalReference(js_entry_sp))); __ ldr(r6, MemOperand(r5)); __ cmp(r6, Operand::Zero()); @@ -1788,10 +1667,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // fp will be invalid because the PushTryHandler below sets it to 0 to // signal the existence of the JSEntry frame. __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); + isolate()))); } __ str(r0, MemOperand(ip)); - __ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception()))); + __ LoadRoot(r0, Heap::kExceptionRootIndex); __ b(&exit); // Invoke: Link this frame into the handler chain. There's only one @@ -1805,9 +1684,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // saved values before returning a failure to C. // Clear any pending exceptions. - __ mov(r5, Operand(isolate->factory()->the_hole_value())); + __ mov(r5, Operand(isolate()->factory()->the_hole_value())); __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); + isolate()))); __ str(r5, MemOperand(ip)); // Invoke the function by calling through JS entry trampoline builtin. @@ -1822,10 +1701,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // r4: argv if (is_construct) { ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, - isolate); + isolate()); __ mov(ip, Operand(construct_entry)); } else { - ExternalReference entry(Builtins::kJSEntryTrampoline, isolate); + ExternalReference entry(Builtins::kJSEntryTrampoline, isolate()); __ mov(ip, Operand(entry)); } __ ldr(ip, MemOperand(ip)); // deref address @@ -1851,7 +1730,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Restore the top frame descriptors from the stack. __ pop(r3); __ mov(ip, - Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate))); + Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); __ str(r3, MemOperand(ip)); // Reset the stack to the callee saved registers. @@ -2010,7 +1889,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ b(ne, &slow); // Null is not instance of anything. - __ cmp(scratch, Operand(masm->isolate()->factory()->null_value())); + __ cmp(scratch, Operand(isolate()->factory()->null_value())); __ b(ne, &object_not_null); __ mov(r0, Operand(Smi::FromInt(1))); __ Ret(HasArgsInRegisters() ? 0 : 2); @@ -2057,7 +1936,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) { // -- r0 : key // -- r1 : receiver // ----------------------------------- - __ cmp(r0, Operand(masm->isolate()->factory()->prototype_string())); + __ cmp(r0, Operand(isolate()->factory()->prototype_string())); __ b(ne, &miss); receiver = r1; } else { @@ -2487,11 +2366,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { Register last_match_info_elements = no_reg; // will be r6; // Ensure that a RegExp stack is allocated. 
- Isolate* isolate = masm->isolate(); ExternalReference address_of_regexp_stack_memory_address = - ExternalReference::address_of_regexp_stack_memory_address(isolate); + ExternalReference::address_of_regexp_stack_memory_address(isolate()); ExternalReference address_of_regexp_stack_memory_size = - ExternalReference::address_of_regexp_stack_memory_size(isolate); + ExternalReference::address_of_regexp_stack_memory_size(isolate()); __ mov(r0, Operand(address_of_regexp_stack_memory_size)); __ ldr(r0, MemOperand(r0, 0)); __ cmp(r0, Operand::Zero()); @@ -2633,7 +2511,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // subject: Subject string // regexp_data: RegExp data (FixedArray) // All checks done. Now push arguments for native regexp code. - __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, r0, r2); + __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r0, r2); // Isolates: note we add an additional parameter here (isolate pointer). const int kRegExpExecuteArguments = 9; @@ -2644,7 +2522,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Arguments are before that on the stack or in registers. // Argument 9 (sp[20]): Pass current isolate address. - __ mov(r0, Operand(ExternalReference::isolate_address(isolate))); + __ mov(r0, Operand(ExternalReference::isolate_address(isolate()))); __ str(r0, MemOperand(sp, 5 * kPointerSize)); // Argument 8 (sp[16]): Indicate that this is a direct call from JavaScript. @@ -2666,7 +2544,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Argument 5 (sp[4]): static offsets vector buffer. __ mov(r0, - Operand(ExternalReference::address_of_static_offsets_vector(isolate))); + Operand(ExternalReference::address_of_static_offsets_vector( + isolate()))); __ str(r0, MemOperand(sp, 1 * kPointerSize)); // For arguments 4 and 3 get string length, calculate start of string data and @@ -2697,7 +2576,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Locate the code entry and call it. __ add(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); - DirectCEntryStub stub; + DirectCEntryStub stub(isolate()); stub.GenerateCall(masm, r6); __ LeaveExitFrame(false, no_reg, true); @@ -2724,9 +2603,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // stack overflow (on the backtrack stack) was detected in RegExp code but // haven't created the exception yet. Handle that in the runtime system. // TODO(592): Rerunning the RegExp to get the stack overflow exception. - __ mov(r1, Operand(isolate->factory()->the_hole_value())); + __ mov(r1, Operand(isolate()->factory()->the_hole_value())); __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); + isolate()))); __ ldr(r0, MemOperand(r2, 0)); __ cmp(r0, r1); __ b(eq, &runtime); @@ -2746,7 +2625,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { __ bind(&failure); // For failure and exception return null. - __ mov(r0, Operand(masm->isolate()->factory()->null_value())); + __ mov(r0, Operand(isolate()->factory()->null_value())); __ add(sp, sp, Operand(4 * kPointerSize)); __ Ret(); @@ -2808,7 +2687,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Get the static offsets vector filled by the native regexp code. 
ExternalReference address_of_static_offsets_vector = - ExternalReference::address_of_static_offsets_vector(isolate); + ExternalReference::address_of_static_offsets_vector(isolate()); __ mov(r2, Operand(address_of_static_offsets_vector)); // r1: number of capture registers @@ -2953,7 +2832,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ SmiTag(r0); __ Push(r3, r2, r1, r0); - CreateAllocationSiteStub create_stub; + CreateAllocationSiteStub create_stub(masm->isolate()); __ CallStub(&create_stub); __ Pop(r3, r2, r1, r0); @@ -2977,11 +2856,62 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { } +static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { + // Do not transform the receiver for strict mode functions. + __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); + __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset)); + __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + + kSmiTagSize))); + __ b(ne, cont); + + // Do not transform the receiver for native (Compilerhints already in r3). + __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); + __ b(ne, cont); +} + + +static void EmitSlowCase(MacroAssembler* masm, + int argc, + Label* non_function) { + // Check for function proxy. + __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE)); + __ b(ne, non_function); + __ push(r1); // put proxy as additional argument + __ mov(r0, Operand(argc + 1, RelocInfo::NONE32)); + __ mov(r2, Operand::Zero()); + __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); + { + Handle<Code> adaptor = + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); + __ Jump(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). + __ bind(non_function); + __ str(r1, MemOperand(sp, argc * kPointerSize)); + __ mov(r0, Operand(argc)); // Set up the number of arguments. + __ mov(r2, Operand::Zero()); + __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); +} + + +static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { + // Wrap the receiver and patch it back onto the stack. + { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); + __ Push(r1, r3); + __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); + __ pop(r1); + } + __ str(r0, MemOperand(sp, argc * kPointerSize)); + __ jmp(cont); +} + + void CallFunctionStub::Generate(MacroAssembler* masm) { // r1 : the function to call - // r2 : feedback vector - // r3 : (only if r2 is not the megamorphic symbol) slot in feedback - // vector (Smi) Label slow, non_function, wrap, cont; if (NeedsChecks()) { @@ -2992,36 +2922,20 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // Goto slow case if we do not have a function. __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); __ b(ne, &slow); - - if (RecordCallTarget()) { - GenerateRecordCallTarget(masm); - // Type information was updated. Because we may call Array, which - // expects either undefined or an AllocationSite in ebx we need - // to set ebx to undefined. - __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); - } } // Fast-case: Invoke the function now. // r1: pushed function - ParameterCount actual(argc_); + int argc = argc_; + ParameterCount actual(argc); if (CallAsMethod()) { if (NeedsChecks()) { - // Do not transform the receiver for strict mode functions. 
- __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); - __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset)); - __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + - kSmiTagSize))); - __ b(ne, &cont); - - // Do not transform the receiver for native (Compilerhints already in r3). - __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); - __ b(ne, &cont); + EmitContinueIfStrictOrNative(masm, &cont); } // Compute the receiver in sloppy mode. - __ ldr(r3, MemOperand(sp, argc_ * kPointerSize)); + __ ldr(r3, MemOperand(sp, argc * kPointerSize)); if (NeedsChecks()) { __ JumpIfSmi(r3, &wrap); @@ -3033,55 +2947,18 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { __ bind(&cont); } + __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper()); if (NeedsChecks()) { // Slow-case: Non-function called. __ bind(&slow); - if (RecordCallTarget()) { - // If there is a call target cache, mark it megamorphic in the - // non-function case. MegamorphicSentinel is an immortal immovable - // object (megamorphic symbol) so no write barrier is needed. - ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), - masm->isolate()->heap()->megamorphic_symbol()); - __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); - __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex); - __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize)); - } - // Check for function proxy. - __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE)); - __ b(ne, &non_function); - __ push(r1); // put proxy as additional argument - __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32)); - __ mov(r2, Operand::Zero()); - __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); - { - Handle<Code> adaptor = - masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); - __ Jump(adaptor, RelocInfo::CODE_TARGET); - } - - // CALL_NON_FUNCTION expects the non-function callee as receiver (instead - // of the original receiver from the call site). - __ bind(&non_function); - __ str(r1, MemOperand(sp, argc_ * kPointerSize)); - __ mov(r0, Operand(argc_)); // Set up the number of arguments. - __ mov(r2, Operand::Zero()); - __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION); - __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), - RelocInfo::CODE_TARGET); + EmitSlowCase(masm, argc, &non_function); } if (CallAsMethod()) { __ bind(&wrap); - // Wrap the receiver and patch it back onto the stack. - { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); - __ Push(r1, r3); - __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); - __ pop(r1); - } - __ str(r0, MemOperand(sp, argc_ * kPointerSize)); - __ jmp(&cont); + EmitWrapCase(masm, argc, &cont); } } @@ -3145,11 +3022,114 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ bind(&do_call); // Set expected number of arguments to zero (not changing r0). 
__ mov(r2, Operand::Zero()); - __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), RelocInfo::CODE_TARGET); } +static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { + __ ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); + __ ldr(vector, FieldMemOperand(vector, + JSFunction::kSharedFunctionInfoOffset)); + __ ldr(vector, FieldMemOperand(vector, + SharedFunctionInfo::kFeedbackVectorOffset)); +} + + +void CallICStub::Generate(MacroAssembler* masm) { + // r1 - function + // r3 - slot id (Smi) + Label extra_checks_or_miss, slow_start; + Label slow, non_function, wrap, cont; + Label have_js_function; + int argc = state_.arg_count(); + ParameterCount actual(argc); + + EmitLoadTypeFeedbackVector(masm, r2); + + // The checks. First, does r1 match the recorded monomorphic target? + __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); + __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); + __ cmp(r1, r4); + __ b(ne, &extra_checks_or_miss); + + __ bind(&have_js_function); + if (state_.CallAsMethod()) { + EmitContinueIfStrictOrNative(masm, &cont); + // Compute the receiver in sloppy mode. + __ ldr(r3, MemOperand(sp, argc * kPointerSize)); + + __ JumpIfSmi(r3, &wrap); + __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE); + __ b(lt, &wrap); + + __ bind(&cont); + } + + __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper()); + + __ bind(&slow); + EmitSlowCase(masm, argc, &non_function); + + if (state_.CallAsMethod()) { + __ bind(&wrap); + EmitWrapCase(masm, argc, &cont); + } + + __ bind(&extra_checks_or_miss); + Label miss; + + __ CompareRoot(r4, Heap::kMegamorphicSymbolRootIndex); + __ b(eq, &slow_start); + __ CompareRoot(r4, Heap::kUninitializedSymbolRootIndex); + __ b(eq, &miss); + + if (!FLAG_trace_ic) { + // We are going megamorphic, and we don't want to visit the runtime. + __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); + __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex); + __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); + __ jmp(&slow_start); + } + + // We are here because tracing is on or we are going monomorphic. + __ bind(&miss); + GenerateMiss(masm); + + // the slow case + __ bind(&slow_start); + // Check that the function is really a JavaScript function. + // r1: pushed function (to be verified) + __ JumpIfSmi(r1, &non_function); + + // Goto slow case if we do not have a function. + __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); + __ b(ne, &slow); + __ jmp(&have_js_function); +} + + +void CallICStub::GenerateMiss(MacroAssembler* masm) { + // Get the receiver of the function from the stack; 1 ~ return address. + __ ldr(r4, MemOperand(sp, (state_.arg_count() + 1) * kPointerSize)); + + { + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); + + // Push the receiver and the function and feedback info. + __ Push(r4, r1, r2, r3); + + // Call the entry. + ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), + masm->isolate()); + __ CallExternalReference(miss, 4); + + // Move result to edi and exit the internal frame. + __ mov(r1, r0); + } +} + + // StringCharCodeAtGenerator void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { Label flat_string; @@ -3512,10 +3492,8 @@ void SubStringStub::Generate(MacroAssembler* masm) { // Make sure first argument is a string. __ ldr(r0, MemOperand(sp, kStringOffset)); - // Do a JumpIfSmi, but fold its jump into the subsequent string test. 
- __ SmiTst(r0); - Condition is_string = masm->IsObjectStringType(r0, r1, ne); - ASSERT(is_string == eq); + __ JumpIfSmi(r0, &runtime); + Condition is_string = masm->IsObjectStringType(r0, r1); __ b(NegateCondition(is_string), &runtime); Label single_char; @@ -3673,7 +3651,7 @@ void SubStringStub::Generate(MacroAssembler* masm) { masm, r1, r5, r2, r3, r4, r6, r9, DEST_ALWAYS_ALIGNED); __ bind(&return_r0); - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); __ IncrementCounter(counters->sub_string_native(), 1, r3, r4); __ Drop(3); __ Ret(); @@ -3808,7 +3786,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop( void StringCompareStub::Generate(MacroAssembler* masm) { Label runtime; - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); // Stack frame on entry. // sp[0]: right string @@ -3842,223 +3820,17 @@ void StringCompareStub::Generate(MacroAssembler* masm) { } -void ArrayPushStub::Generate(MacroAssembler* masm) { - Register receiver = r0; - Register scratch = r1; - - int argc = arguments_count(); - - if (argc == 0) { - // Nothing to do, just return the length. - __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); - __ Drop(argc + 1); - __ Ret(); - return; - } - - Isolate* isolate = masm->isolate(); - - if (argc != 1) { - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - Label call_builtin, attempt_to_grow_elements, with_write_barrier; - - Register elements = r6; - Register end_elements = r5; - // Get the elements array of the object. - __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); - - if (IsFastSmiOrObjectElementsKind(elements_kind())) { - // Check that the elements are in fast mode and writable. - __ CheckMap(elements, - scratch, - Heap::kFixedArrayMapRootIndex, - &call_builtin, - DONT_DO_SMI_CHECK); - } - - // Get the array's length into scratch and calculate new length. - __ ldr(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); - __ add(scratch, scratch, Operand(Smi::FromInt(argc))); - - // Get the elements' length. - __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); - - // Check if we could survive without allocation. - __ cmp(scratch, r4); - - const int kEndElementsOffset = - FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; - - if (IsFastSmiOrObjectElementsKind(elements_kind())) { - __ b(gt, &attempt_to_grow_elements); - - // Check if value is a smi. - __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize)); - __ JumpIfNotSmi(r4, &with_write_barrier); - - // Store the value. - // We may need a register containing the address end_elements below, so - // write back the value in end_elements. - __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch)); - __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex)); - } else { - // Check if we could survive without allocation. - __ cmp(scratch, r4); - __ b(gt, &call_builtin); - - __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize)); - __ StoreNumberToDoubleElements(r4, scratch, elements, r5, d0, - &call_builtin, argc * kDoubleSize); - } - - // Save new length. 
- __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); - __ Drop(argc + 1); - __ mov(r0, scratch); - __ Ret(); - - if (IsFastDoubleElementsKind(elements_kind())) { - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - __ bind(&with_write_barrier); - - if (IsFastSmiElementsKind(elements_kind())) { - if (FLAG_trace_elements_transitions) __ jmp(&call_builtin); - - __ ldr(r9, FieldMemOperand(r4, HeapObject::kMapOffset)); - __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); - __ cmp(r9, ip); - __ b(eq, &call_builtin); - - ElementsKind target_kind = IsHoleyElementsKind(elements_kind()) - ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; - __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); - __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); - __ ldr(r3, ContextOperand(r3, Context::JS_ARRAY_MAPS_INDEX)); - const int header_size = FixedArrayBase::kHeaderSize; - // Verify that the object can be transitioned in place. - const int origin_offset = header_size + elements_kind() * kPointerSize; - __ ldr(r2, FieldMemOperand(receiver, origin_offset)); - __ ldr(ip, FieldMemOperand(r3, HeapObject::kMapOffset)); - __ cmp(r2, ip); - __ b(ne, &call_builtin); - - const int target_offset = header_size + target_kind * kPointerSize; - __ ldr(r3, FieldMemOperand(r3, target_offset)); - __ mov(r2, receiver); - ElementsTransitionGenerator::GenerateMapChangeElementsTransition( - masm, DONT_TRACK_ALLOCATION_SITE, NULL); - } - - // Save new length. - __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); - - // Store the value. - // We may need a register containing the address end_elements below, so write - // back the value in end_elements. - __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch)); - __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex)); - - __ RecordWrite(elements, - end_elements, - r4, - kLRHasNotBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - __ Drop(argc + 1); - __ mov(r0, scratch); - __ Ret(); - - __ bind(&attempt_to_grow_elements); - // scratch: array's length + 1. - - if (!FLAG_inline_new) { - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize)); - // Growing elements that are SMI-only requires special handling in case the - // new element is non-Smi. For now, delegate to the builtin. - if (IsFastSmiElementsKind(elements_kind())) { - __ JumpIfNotSmi(r2, &call_builtin); - } - - // We could be lucky and the elements array could be at the top of new-space. - // In this case we can just grow it in place by moving the allocation pointer - // up. - ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(isolate); - ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(isolate); - - const int kAllocationDelta = 4; - ASSERT(kAllocationDelta >= argc); - // Load top and check if it is the end of elements. 
- __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch)); - __ add(end_elements, end_elements, Operand(kEndElementsOffset)); - __ mov(r4, Operand(new_space_allocation_top)); - __ ldr(r3, MemOperand(r4)); - __ cmp(end_elements, r3); - __ b(ne, &call_builtin); - - __ mov(r9, Operand(new_space_allocation_limit)); - __ ldr(r9, MemOperand(r9)); - __ add(r3, r3, Operand(kAllocationDelta * kPointerSize)); - __ cmp(r3, r9); - __ b(hi, &call_builtin); - - // We fit and could grow elements. - // Update new_space_allocation_top. - __ str(r3, MemOperand(r4)); - // Push the argument. - __ str(r2, MemOperand(end_elements)); - // Fill the rest with holes. - __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); - for (int i = 1; i < kAllocationDelta; i++) { - __ str(r3, MemOperand(end_elements, i * kPointerSize)); - } - - // Update elements' and array's sizes. - __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); - __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); - __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta))); - __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); - - // Elements are in new space, so write barrier is not required. - __ Drop(argc + 1); - __ mov(r0, scratch); - __ Ret(); - - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); -} - - void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r1 : left // -- r0 : right // -- lr : return address // ----------------------------------- - Isolate* isolate = masm->isolate(); // Load r2 with the allocation site. We stick an undefined dummy value here // and replace it with the real allocation site later when we instantiate this // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). - __ Move(r2, handle(isolate->heap()->undefined_value())); + __ Move(r2, handle(isolate()->heap()->undefined_value())); // Make sure that we actually patched the allocation site. if (FLAG_debug_code) { @@ -4074,7 +3846,7 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { // Tail call into the stub that handles binary operations with allocation // sites. - BinaryOpWithAllocationSiteStub stub(state_); + BinaryOpWithAllocationSiteStub stub(isolate(), state_); __ TailCallStub(&stub); } @@ -4152,9 +3924,9 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { __ bind(&unordered); __ bind(&generic_stub); - ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, + ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC, CompareIC::GENERIC); - __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); + __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); __ bind(&maybe_undefined1); if (Token::IsOrderedRelationalCompareOp(op_)) { @@ -4377,7 +4149,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) { { // Call the runtime system in a fresh internal frame. 
ExternalReference miss = - ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); + ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate()); FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); __ Push(r1, r0); @@ -4409,7 +4181,7 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) { void DirectCEntryStub::GenerateCall(MacroAssembler* masm, Register target) { intptr_t code = - reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location()); + reinterpret_cast<intptr_t>(GetCode().location()); __ Move(ip, target); __ mov(lr, Operand(code, RelocInfo::CODE_TARGET)); __ blx(lr); // Call the stub. @@ -4485,7 +4257,7 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, __ stm(db_w, sp, spill_mask); __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); __ mov(r1, Operand(Handle<Name>(name))); - NameDictionaryLookupStub stub(NEGATIVE_LOOKUP); + NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP); __ CallStub(&stub); __ cmp(r0, Operand::Zero()); __ ldm(ia_w, sp, spill_mask); @@ -4561,7 +4333,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, __ Move(r0, elements); __ Move(r1, name); } - NameDictionaryLookupStub stub(POSITIVE_LOOKUP); + NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP); __ CallStub(&stub); __ cmp(r0, Operand::Zero()); __ mov(scratch2, Operand(r2)); @@ -4665,11 +4437,11 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( Isolate* isolate) { - StoreBufferOverflowStub stub1(kDontSaveFPRegs); - stub1.GetCode(isolate); + StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs); + stub1.GetCode(); // Hydrogen code stubs need stub2 at snapshot time. - StoreBufferOverflowStub stub2(kSaveFPRegs); - stub2.GetCode(isolate); + StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); + stub2.GetCode(); } @@ -4774,12 +4546,11 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { __ Move(address, regs_.address()); __ Move(r0, regs_.object()); __ Move(r1, address); - __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate()))); + __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); AllowExternalCallThatCantCauseGC scope(masm); __ CallCFunction( - ExternalReference::incremental_marking_record_write_function( - masm->isolate()), + ExternalReference::incremental_marking_record_write_function(isolate()), argument_count); regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); } @@ -4934,8 +4705,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { - CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); - __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); + CEntryStub ces(isolate(), 1, fp_registers_ ? 
kSaveFPRegs : kDontSaveFPRegs); + __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); int parameter_count_offset = StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; __ ldr(r1, MemOperand(fp, parameter_count_offset)); @@ -4951,8 +4722,9 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { if (masm->isolate()->function_entry_hook() != NULL) { - PredictableCodeSizeScope predictable(masm, 4 * Assembler::kInstrSize); - ProfileEntryHookStub stub; + ProfileEntryHookStub stub(masm->isolate()); + int code_size = masm->CallStubSize(&stub) + 2 * Assembler::kInstrSize; + PredictableCodeSizeScope predictable(masm, code_size); __ push(lr); __ CallStub(&stub); __ pop(lr); @@ -4998,18 +4770,18 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) { #if V8_HOST_ARCH_ARM int32_t entry_hook = - reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook()); + reinterpret_cast<int32_t>(isolate()->function_entry_hook()); __ mov(ip, Operand(entry_hook)); #else // Under the simulator we need to indirect the entry hook through a // trampoline function at a known address. // It additionally takes an isolate as a third parameter - __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate()))); + __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline)); __ mov(ip, Operand(ExternalReference(&dispatcher, ExternalReference::BUILTIN_CALL, - masm->isolate()))); + isolate()))); #endif __ Call(ip); @@ -5027,7 +4799,7 @@ template<class T> static void CreateArrayDispatch(MacroAssembler* masm, AllocationSiteOverrideMode mode) { if (mode == DISABLE_ALLOCATION_SITES) { - T stub(GetInitialFastElementsKind(), mode); + T stub(masm->isolate(), GetInitialFastElementsKind(), mode); __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { int last_index = GetSequenceIndexFromFastElementsKind( @@ -5035,7 +4807,7 @@ static void CreateArrayDispatch(MacroAssembler* masm, for (int i = 0; i <= last_index; ++i) { ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); __ cmp(r3, Operand(kind)); - T stub(kind); + T stub(masm->isolate(), kind); __ TailCallStub(&stub, eq); } @@ -5077,12 +4849,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, ElementsKind initial = GetInitialFastElementsKind(); ElementsKind holey_initial = GetHoleyElementsKind(initial); - ArraySingleArgumentConstructorStub stub_holey(holey_initial, + ArraySingleArgumentConstructorStub stub_holey(masm->isolate(), + holey_initial, DISABLE_ALLOCATION_SITES); __ TailCallStub(&stub_holey); __ bind(&normal_sequence); - ArraySingleArgumentConstructorStub stub(initial, + ArraySingleArgumentConstructorStub stub(masm->isolate(), + initial, DISABLE_ALLOCATION_SITES); __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { @@ -5110,7 +4884,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, for (int i = 0; i <= last_index; ++i) { ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); __ cmp(r3, Operand(kind)); - ArraySingleArgumentConstructorStub stub(kind); + ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); __ TailCallStub(&stub, eq); } @@ -5128,11 +4902,11 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { TERMINAL_FAST_ELEMENTS_KIND); for (int i = 0; i <= to_index; ++i) { ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); - T stub(kind); - stub.GetCode(isolate); + T stub(isolate, kind); + 
stub.GetCode(); if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { - T stub1(kind, DISABLE_ALLOCATION_SITES); - stub1.GetCode(isolate); + T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); + stub1.GetCode(); } } } @@ -5153,12 +4927,12 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; for (int i = 0; i < 2; i++) { // For internal arrays we only need a few things - InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); - stubh1.GetCode(isolate); - InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); - stubh2.GetCode(isolate); - InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); - stubh3.GetCode(isolate); + InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); + stubh1.GetCode(); + InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); + stubh2.GetCode(); + InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]); + stubh3.GetCode(); } } @@ -5236,10 +5010,10 @@ void InternalArrayConstructorStub::GenerateCase( MacroAssembler* masm, ElementsKind kind) { __ cmp(r0, Operand(1)); - InternalArrayNoArgumentConstructorStub stub0(kind); + InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); __ TailCallStub(&stub0, lo); - InternalArrayNArgumentsConstructorStub stubN(kind); + InternalArrayNArgumentsConstructorStub stubN(isolate(), kind); __ TailCallStub(&stubN, hi); if (IsFastPackedElementsKind(kind)) { @@ -5249,11 +5023,11 @@ void InternalArrayConstructorStub::GenerateCase( __ cmp(r3, Operand::Zero()); InternalArraySingleArgumentConstructorStub - stub1_holey(GetHoleyElementsKind(kind)); + stub1_holey(isolate(), GetHoleyElementsKind(kind)); __ TailCallStub(&stub1_holey, ne); } - InternalArraySingleArgumentConstructorStub stub1(kind); + InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); __ TailCallStub(&stub1); } @@ -5342,8 +5116,6 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { STATIC_ASSERT(FCA::kHolderIndex == 0); STATIC_ASSERT(FCA::kArgsLength == 7); - Isolate* isolate = masm->isolate(); - // context save __ push(context); // load context from callee @@ -5365,7 +5137,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { __ push(scratch); // isolate __ mov(scratch, - Operand(ExternalReference::isolate_address(isolate))); + Operand(ExternalReference::isolate_address(isolate()))); __ push(scratch); // holder __ push(holder); @@ -5377,7 +5149,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { // it's not controlled by GC. 
const int kApiStackSpace = 4; - FrameAndConstantPoolScope frame_scope(masm, StackFrame::MANUAL); + FrameScope frame_scope(masm, StackFrame::MANUAL); __ EnterExitFrame(false, kApiStackSpace); ASSERT(!api_function_address.is(r0) && !scratch.is(r0)); @@ -5397,11 +5169,8 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { __ str(ip, MemOperand(r0, 3 * kPointerSize)); const int kStackUnwindSpace = argc + FCA::kArgsLength + 1; - Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); - ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL; - ApiFunction thunk_fun(thunk_address); - ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, - masm->isolate()); + ExternalReference thunk_ref = + ExternalReference::invoke_function_callback(isolate()); AllowExternalCallThatCantCauseGC scope(masm); MemOperand context_restore_operand( @@ -5437,7 +5206,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) { __ add(r1, r0, Operand(1 * kPointerSize)); // r1 = PCA const int kApiStackSpace = 1; - FrameAndConstantPoolScope frame_scope(masm, StackFrame::MANUAL); + FrameScope frame_scope(masm, StackFrame::MANUAL); __ EnterExitFrame(false, kApiStackSpace); // Create PropertyAccessorInfo instance on the stack above the exit frame with @@ -5447,12 +5216,8 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) { const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1; - Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); - ExternalReference::Type thunk_type = - ExternalReference::PROFILING_GETTER_CALL; - ApiFunction thunk_fun(thunk_address); - ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, - masm->isolate()); + ExternalReference thunk_ref = + ExternalReference::invoke_accessor_getter_callback(isolate()); __ CallApiFunctionAndReturn(api_function_address, thunk_ref, kStackUnwindSpace, diff --git a/deps/v8/src/arm/code-stubs-arm.h b/deps/v8/src/arm/code-stubs-arm.h index ef78802be..3237b3af4 100644 --- a/deps/v8/src/arm/code-stubs-arm.h +++ b/deps/v8/src/arm/code-stubs-arm.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM_CODE_STUBS_ARM_H_ #define V8_ARM_CODE_STUBS_ARM_H_ @@ -39,8 +16,8 @@ void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code); class StoreBufferOverflowStub: public PlatformCodeStub { public: - explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp) - : save_doubles_(save_fp) {} + StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp) + : PlatformCodeStub(isolate), save_doubles_(save_fp) {} void Generate(MacroAssembler* masm); @@ -91,7 +68,7 @@ class StringHelper : public AllStatic { class SubStringStub: public PlatformCodeStub { public: - SubStringStub() {} + explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {} private: Major MajorKey() { return SubString; } @@ -104,7 +81,7 @@ class SubStringStub: public PlatformCodeStub { class StringCompareStub: public PlatformCodeStub { public: - StringCompareStub() { } + explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { } // Compares two flat ASCII strings and returns result in r0. static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm, @@ -144,10 +121,12 @@ class StringCompareStub: public PlatformCodeStub { // so you don't have to set up the frame. 
class WriteInt32ToHeapNumberStub : public PlatformCodeStub { public: - WriteInt32ToHeapNumberStub(Register the_int, + WriteInt32ToHeapNumberStub(Isolate* isolate, + Register the_int, Register the_heap_number, Register scratch) - : the_int_(the_int), + : PlatformCodeStub(isolate), + the_int_(the_int), the_heap_number_(the_heap_number), scratch_(scratch) { } @@ -177,12 +156,14 @@ class WriteInt32ToHeapNumberStub : public PlatformCodeStub { class RecordWriteStub: public PlatformCodeStub { public: - RecordWriteStub(Register object, + RecordWriteStub(Isolate* isolate, + Register object, Register value, Register address, RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) - : object_(object), + : PlatformCodeStub(isolate), + object_(object), value_(value), address_(address), remembered_set_action_(remembered_set_action), @@ -363,7 +344,7 @@ class RecordWriteStub: public PlatformCodeStub { // moved by GC class DirectCEntryStub: public PlatformCodeStub { public: - DirectCEntryStub() {} + explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {} void Generate(MacroAssembler* masm); void GenerateCall(MacroAssembler* masm, Register target); @@ -379,7 +360,8 @@ class NameDictionaryLookupStub: public PlatformCodeStub { public: enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP }; - explicit NameDictionaryLookupStub(LookupMode mode) : mode_(mode) { } + NameDictionaryLookupStub(Isolate* isolate, LookupMode mode) + : PlatformCodeStub(isolate), mode_(mode) { } void Generate(MacroAssembler* masm); diff --git a/deps/v8/src/arm/codegen-arm.cc b/deps/v8/src/arm/codegen-arm.cc index cfc9dfec4..8a46006eb 100644 --- a/deps/v8/src/arm/codegen-arm.cc +++ b/deps/v8/src/arm/codegen-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -102,13 +79,11 @@ UnaryMathFunction CreateExpFunction() { #if defined(V8_HOST_ARCH_ARM) OS::MemCopyUint8Function CreateMemCopyUint8Function( - OS::MemCopyUint8Function stub) { + OS::MemCopyUint8Function stub) { #if defined(USE_SIMULATOR) return stub; #else - if (Serializer::enabled() || !CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) { - return stub; - } + if (!CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) return stub; size_t actual_size; byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB, &actual_size, true)); if (buffer == NULL) return stub; @@ -260,13 +235,11 @@ OS::MemCopyUint8Function CreateMemCopyUint8Function( // Convert 8 to 16. The number of character to copy must be at least 8. OS::MemCopyUint16Uint8Function CreateMemCopyUint16Uint8Function( - OS::MemCopyUint16Uint8Function stub) { + OS::MemCopyUint16Uint8Function stub) { #if defined(USE_SIMULATOR) return stub; #else - if (Serializer::enabled() || !CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) { - return stub; - } + if (!CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) return stub; size_t actual_size; byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB, &actual_size, true)); if (buffer == NULL) return stub; @@ -849,47 +822,46 @@ void MathExpGenerator::EmitMathExp(MacroAssembler* masm, static const uint32_t kCodeAgePatchFirstInstruction = 0xe24f0008; #endif -static byte* GetNoCodeAgeSequence(uint32_t* length) { - // The sequence of instructions that is patched out for aging code is the - // following boilerplate stack-building prologue that is found in FUNCTIONS - static bool initialized = false; - static uint32_t sequence[kNoCodeAgeSequenceLength]; - byte* byte_sequence = reinterpret_cast<byte*>(sequence); - *length = kNoCodeAgeSequenceLength * Assembler::kInstrSize; - if (!initialized) { - // Since patcher is a large object, allocate it dynamically when needed, - // to avoid overloading the stack in stress conditions. - SmartPointer<CodePatcher> - patcher(new CodePatcher(byte_sequence, kNoCodeAgeSequenceLength)); - PredictableCodeSizeScope scope(patcher->masm(), *length); - patcher->masm()->PushFixedFrame(r1); - patcher->masm()->nop(ip.code()); - patcher->masm()->add( - fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); - initialized = true; - } - return byte_sequence; +CodeAgingHelper::CodeAgingHelper() { + ASSERT(young_sequence_.length() == kNoCodeAgeSequenceLength); + // Since patcher is a large object, allocate it dynamically when needed, + // to avoid overloading the stack in stress conditions. + // DONT_FLUSH is used because the CodeAgingHelper is initialized early in + // the process, before ARM simulator ICache is setup. 
+ SmartPointer<CodePatcher> patcher( + new CodePatcher(young_sequence_.start(), + young_sequence_.length() / Assembler::kInstrSize, + CodePatcher::DONT_FLUSH)); + PredictableCodeSizeScope scope(patcher->masm(), young_sequence_.length()); + patcher->masm()->PushFixedFrame(r1); + patcher->masm()->nop(ip.code()); + patcher->masm()->add( + fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); +} + + +#ifdef DEBUG +bool CodeAgingHelper::IsOld(byte* candidate) const { + return Memory::uint32_at(candidate) == kCodeAgePatchFirstInstruction; } +#endif -bool Code::IsYoungSequence(byte* sequence) { - uint32_t young_length; - byte* young_sequence = GetNoCodeAgeSequence(&young_length); - bool result = !memcmp(sequence, young_sequence, young_length); - ASSERT(result || - Memory::uint32_at(sequence) == kCodeAgePatchFirstInstruction); +bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) { + bool result = isolate->code_aging_helper()->IsYoung(sequence); + ASSERT(result || isolate->code_aging_helper()->IsOld(sequence)); return result; } -void Code::GetCodeAgeAndParity(byte* sequence, Age* age, +void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age, MarkingParity* parity) { - if (IsYoungSequence(sequence)) { + if (IsYoungSequence(isolate, sequence)) { *age = kNoAgeCodeAge; *parity = NO_MARKING_PARITY; } else { Address target_address = Memory::Address_at( - sequence + Assembler::kInstrSize * (kNoCodeAgeSequenceLength - 1)); + sequence + (kNoCodeAgeSequenceLength - Assembler::kInstrSize)); Code* stub = GetCodeFromTargetAddress(target_address); GetCodeAgeAndParity(stub, age, parity); } @@ -900,10 +872,9 @@ void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence, Code::Age age, MarkingParity parity) { - uint32_t young_length; - byte* young_sequence = GetNoCodeAgeSequence(&young_length); + uint32_t young_length = isolate->code_aging_helper()->young_sequence_length(); if (age == kNoAgeCodeAge) { - CopyBytes(sequence, young_sequence, young_length); + isolate->code_aging_helper()->CopyYoungSequenceTo(sequence); CPU::FlushICache(sequence, young_length); } else { Code* stub = GetCodeAgeStub(isolate, age, parity); diff --git a/deps/v8/src/arm/codegen-arm.h b/deps/v8/src/arm/codegen-arm.h index 0bf7ccadc..2fc8eb3f0 100644 --- a/deps/v8/src/arm/codegen-arm.h +++ b/deps/v8/src/arm/codegen-arm.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM_CODEGEN_ARM_H_ #define V8_ARM_CODEGEN_ARM_H_ diff --git a/deps/v8/src/arm/constants-arm.cc b/deps/v8/src/arm/constants-arm.cc index 7d59a84b1..676239f82 100644 --- a/deps/v8/src/arm/constants-arm.cc +++ b/deps/v8/src/arm/constants-arm.cc @@ -1,29 +1,6 @@ // Copyright 2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/arm/constants-arm.h b/deps/v8/src/arm/constants-arm.h index 14f4705cb..5bace505f 100644 --- a/deps/v8/src/arm/constants-arm.h +++ b/deps/v8/src/arm/constants-arm.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM_CONSTANTS_ARM_H_ #define V8_ARM_CONSTANTS_ARM_H_ diff --git a/deps/v8/src/arm/cpu-arm.cc b/deps/v8/src/arm/cpu-arm.cc index 20c6a5dcc..083d9b39e 100644 --- a/deps/v8/src/arm/cpu-arm.cc +++ b/deps/v8/src/arm/cpu-arm.cc @@ -1,29 +1,6 @@ // Copyright 2006-2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // CPU specific code for arm independent of OS goes here. #ifdef __arm__ @@ -46,16 +23,6 @@ namespace v8 { namespace internal { -void CPU::SetUp() { - CpuFeatures::Probe(); -} - - -bool CPU::SupportsCrankshaft() { - return CpuFeatures::IsSupported(VFP3); -} - - void CPU::FlushICache(void* start, size_t size) { // Nothing to do flushing no instructions. 
if (size == 0) { diff --git a/deps/v8/src/arm/debug-arm.cc b/deps/v8/src/arm/debug-arm.cc index 12258ccad..c3270f0bc 100644 --- a/deps/v8/src/arm/debug-arm.cc +++ b/deps/v8/src/arm/debug-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -35,7 +12,6 @@ namespace v8 { namespace internal { -#ifdef ENABLE_DEBUGGER_SUPPORT bool BreakLocationIterator::IsDebugBreakAtReturn() { return Debug::IsDebugBreakAtReturn(rinfo()); } @@ -56,7 +32,7 @@ void BreakLocationIterator::SetDebugBreakAtReturn() { patcher.masm()->ldr(v8::internal::ip, MemOperand(v8::internal::pc, 0)); patcher.masm()->blx(v8::internal::ip); patcher.Emit( - debug_info_->GetIsolate()->debug()->debug_break_return()->entry()); + debug_info_->GetIsolate()->builtins()->Return_DebugBreak()->entry()); patcher.masm()->bkpt(0); } @@ -97,7 +73,7 @@ void BreakLocationIterator::SetDebugBreakAtSlot() { patcher.masm()->ldr(v8::internal::ip, MemOperand(v8::internal::pc, 0)); patcher.masm()->blx(v8::internal::ip); patcher.Emit( - debug_info_->GetIsolate()->debug()->debug_break_slot()->entry()); + debug_info_->GetIsolate()->builtins()->Slot_DebugBreak()->entry()); } @@ -146,7 +122,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, __ mov(r0, Operand::Zero()); // no arguments __ mov(r1, Operand(ExternalReference::debug_break(masm->isolate()))); - CEntryStub ceb(1); + CEntryStub ceb(masm->isolate(), 1); __ CallStub(&ceb); // Restore the register values from the expression stack. 
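The debug-break helpers in this file describe which registers are live at a break site as a bit mask (for example, r1.bit() | r3.bit() in the CallICStub break point added in the next hunk), and Generate_DebugBreakCallHelper saves and restores exactly those registers around the call into the debugger. A small sketch of that mask convention, with a made-up Register type rather than the real ARM assembler definitions:

    #include <cstdio>

    // Each register contributes one bit (1 << code) to the mask, mirroring the
    // r1.bit() | r3.bit() expressions passed to Generate_DebugBreakCallHelper.
    struct Register {
      int code;
      unsigned bit() const { return 1u << code; }
    };

    int main() {
      Register r1{1}, r3{3};
      unsigned live_registers = r1.bit() | r3.bit();  // registers to keep alive
      for (int code = 0; code < 16; ++code) {
        if (live_registers & (1u << code)) {
          std::printf("preserve r%d around the debug break call\n", code);
        }
      }
      return 0;
    }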
@@ -179,6 +155,16 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, } +void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) { + // Register state for CallICStub + // ----------- S t a t e ------------- + // -- r1 : function + // -- r3 : slot in feedback array (smi) + // ----------------------------------- + Generate_DebugBreakCallHelper(masm, r1.bit() | r3.bit(), 0); +} + + void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) { // Calling convention for IC load (from ic-arm.cc). // ----------- S t a t e ------------- @@ -235,15 +221,6 @@ void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) { } -void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) { - // Calling convention for IC call (from ic-arm.cc) - // ----------- S t a t e ------------- - // -- r2 : name - // ----------------------------------- - Generate_DebugBreakCallHelper(masm, r2.bit(), 0); -} - - void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) { // In places other than IC call sites it is expected that r0 is TOS which // is an object - this is not generally the case so this should be used with @@ -261,17 +238,6 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) { } -void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) { - // Register state for CallFunctionStub (from code-stubs-arm.cc). - // ----------- S t a t e ------------- - // -- r1 : function - // -- r2 : feedback array - // -- r3 : slot in feedback array - // ----------------------------------- - Generate_DebugBreakCallHelper(masm, r1.bit() | r2.bit() | r3.bit(), 0); -} - - void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) { // Calling convention for CallConstructStub (from code-stubs-arm.cc) // ----------- S t a t e ------------- @@ -329,10 +295,6 @@ const bool Debug::kFrameDropperSupported = false; #undef __ - - -#endif // ENABLE_DEBUGGER_SUPPORT - } } // namespace v8::internal #endif // V8_TARGET_ARCH_ARM diff --git a/deps/v8/src/arm/deoptimizer-arm.cc b/deps/v8/src/arm/deoptimizer-arm.cc index ef3ea275c..aa98c8b75 100644 --- a/deps/v8/src/arm/deoptimizer-arm.cc +++ b/deps/v8/src/arm/deoptimizer-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -54,7 +31,7 @@ void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) { // Fail hard and early if we enter this code object again. byte* pointer = code->FindCodeAgeSequence(); if (pointer != NULL) { - pointer += kNoCodeAgeSequenceLength * Assembler::kInstrSize; + pointer += kNoCodeAgeSequenceLength; } else { pointer = code->instruction_start(); } @@ -87,7 +64,8 @@ void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) { // We need calls to have a predictable size in the unoptimized code, but // this is optimized code, so we don't have to have a predictable size. int call_size_in_bytes = - MacroAssembler::CallSizeNotPredictableCodeSize(deopt_entry, + MacroAssembler::CallSizeNotPredictableCodeSize(isolate, + deopt_entry, RelocInfo::NONE32); int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize; ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0); diff --git a/deps/v8/src/arm/disasm-arm.cc b/deps/v8/src/arm/disasm-arm.cc index aa8ee22b7..0a5d5b0d3 100644 --- a/deps/v8/src/arm/disasm-arm.cc +++ b/deps/v8/src/arm/disasm-arm.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // A Disassembler object is used to disassemble a block of code instruction by // instruction. 
The default implementation of the NameConverter object can be @@ -1272,7 +1249,7 @@ void Decoder::DecodeTypeVFP(Instruction* instr) { } else if ((instr->Opc2Value() == 0xA) && (instr->Opc3Value() == 0x3) && (instr->Bit(8) == 1)) { // vcvt.f64.s32 Dd, Dd, #<fbits> - int fraction_bits = 32 - ((instr->Bit(5) << 4) | instr->Bits(3, 0)); + int fraction_bits = 32 - ((instr->Bits(3, 0) << 1) | instr->Bit(5)); Format(instr, "vcvt'cond.f64.s32 'Dd, 'Dd"); out_buffer_pos_ += OS::SNPrintF(out_buffer_ + out_buffer_pos_, ", #%d", fraction_bits); diff --git a/deps/v8/src/arm/frames-arm.cc b/deps/v8/src/arm/frames-arm.cc index 780b48a8e..605f9f422 100644 --- a/deps/v8/src/arm/frames-arm.cc +++ b/deps/v8/src/arm/frames-arm.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/arm/frames-arm.h b/deps/v8/src/arm/frames-arm.h index 29000ca3a..6dd518640 100644 --- a/deps/v8/src/arm/frames-arm.h +++ b/deps/v8/src/arm/frames-arm.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM_FRAMES_ARM_H_ #define V8_ARM_FRAMES_ARM_H_ diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc index b5ec2d5fd..c22caa4a8 100644 --- a/deps/v8/src/arm/full-codegen-arm.cc +++ b/deps/v8/src/arm/full-codegen-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -118,14 +95,20 @@ static void EmitStackCheck(MacroAssembler* masm_, Isolate* isolate = masm_->isolate(); Label ok; ASSERT(scratch.is(sp) == (pointers == 0)); + Heap::RootListIndex index; if (pointers != 0) { __ sub(scratch, sp, Operand(pointers * kPointerSize)); + index = Heap::kRealStackLimitRootIndex; + } else { + index = Heap::kStackLimitRootIndex; } - __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex); + __ LoadRoot(stack_limit_scratch, index); __ cmp(scratch, Operand(stack_limit_scratch)); __ b(hs, &ok); - PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); - __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET); + Handle<Code> stack_check = isolate->builtins()->StackCheck(); + PredictableCodeSizeScope predictable(masm_, + masm_->CallSize(stack_check, RelocInfo::CODE_TARGET)); + __ Call(stack_check, RelocInfo::CODE_TARGET); __ bind(&ok); } @@ -150,8 +133,6 @@ void FullCodeGenerator::Generate() { handler_table_ = isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); - InitializeFeedbackVector(); - profiling_counter_ = isolate()->factory()->NewCell( Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); SetFunctionPosition(function()); @@ -236,7 +217,7 @@ void FullCodeGenerator::Generate() { __ Push(info->scope()->GetScopeInfo()); __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2); } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); + FastNewContextStub stub(isolate(), heap_slots); __ CallStub(&stub); } else { __ push(r1); @@ -297,7 +278,7 @@ void FullCodeGenerator::Generate() { } else { type = ArgumentsAccessStub::NEW_SLOPPY_FAST; } - ArgumentsAccessStub stub(type); + ArgumentsAccessStub stub(isolate(), type); __ CallStub(&stub); SetVar(arguments, r0, r1, r2); @@ -1187,12 +1168,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { Label non_proxy; __ bind(&fixed_array); - Handle<Object> feedback = Handle<Object>( - Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), - isolate()); - StoreFeedbackVectorSlot(slot, feedback); __ Move(r1, FeedbackVector()); - __ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker))); + __ mov(r2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate()))); __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot))); __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check @@ -1351,7 +1328,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, !pretenure && scope()->is_function_scope() && info->num_literals() == 0) { - FastNewClosureStub stub(info->strict_mode(), info->is_generator()); + FastNewClosureStub stub(isolate(), + info->strict_mode(), + info->is_generator()); __ mov(r2, Operand(info)); __ CallStub(&stub); } else { @@ -1671,13 +1650,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { : ObjectLiteral::kNoFlags; __ mov(r0, Operand(Smi::FromInt(flags))); int properties_count = constant_properties->length() / 2; - if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() || - flags != ObjectLiteral::kFastElements || + if (expr->may_store_doubles() || expr->depth() > 1 || + Serializer::enabled(isolate()) || flags != ObjectLiteral::kFastElements || properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { __ Push(r3, r2, r1, r0); __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4); } else { - FastCloneShallowObjectStub stub(properties_count); + FastCloneShallowObjectStub stub(isolate(), 
properties_count); __ CallStub(&stub); } @@ -1816,13 +1795,14 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { if (has_fast_elements && constant_elements_values->map() == isolate()->heap()->fixed_cow_array_map()) { FastCloneShallowArrayStub stub( + isolate(), FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, allocation_site_mode, length); __ CallStub(&stub); __ IncrementCounter( isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2); - } else if (expr->depth() > 1 || Serializer::enabled() || + } else if (expr->depth() > 1 || Serializer::enabled(isolate()) || length > FastCloneShallowArrayStub::kMaximumClonedLength) { __ mov(r0, Operand(Smi::FromInt(flags))); __ Push(r3, r2, r1, r0); @@ -1837,7 +1817,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; } - FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); + FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode, + length); __ CallStub(&stub); } @@ -1869,7 +1850,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); } else { __ mov(r3, Operand(Smi::FromInt(i))); - StoreArrayLiteralElementStub stub; + StoreArrayLiteralElementStub stub(isolate()); __ CallStub(&stub); } @@ -1886,7 +1867,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { void FullCodeGenerator::VisitAssignment(Assignment* expr) { - ASSERT(expr->target()->IsValidLeftHandSide()); + ASSERT(expr->target()->IsValidReferenceExpression()); Comment cmnt(masm_, "[ Assignment"); @@ -2114,7 +2095,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) { CallIC(ic, TypeFeedbackId::None()); __ mov(r1, r0); __ str(r1, MemOperand(sp, 2 * kPointerSize)); - CallFunctionStub stub(1, CALL_AS_METHOD); + CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); __ CallStub(&stub); __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); @@ -2272,7 +2253,7 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) { Label gc_required; Label allocated; - Handle<Map> map(isolate()->native_context()->generator_result_map()); + Handle<Map> map(isolate()->native_context()->iterator_result_map()); __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT); __ jmp(&allocated); @@ -2343,8 +2324,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, patch_site.EmitJumpIfSmi(scratch1, &smi_case); __ bind(&stub_call); - BinaryOpICStub stub(op, mode); - CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); + BinaryOpICStub stub(isolate(), op, mode); + CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); patch_site.EmitPatchInfo(); __ jmp(&done); @@ -2419,16 +2400,16 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op, OverwriteMode mode) { __ pop(r1); - BinaryOpICStub stub(op, mode); + BinaryOpICStub stub(isolate(), op, mode); JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. - CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); + CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); patch_site.EmitPatchInfo(); context()->Plug(r0); } void FullCodeGenerator::EmitAssignment(Expression* expr) { - ASSERT(expr->IsValidLeftHandSide()); + ASSERT(expr->IsValidReferenceExpression()); // Left-hand side can only be a property, a global or a (parameter or local) // slot. @@ -2628,14 +2609,15 @@ void FullCodeGenerator::CallIC(Handle<Code> code, // Code common for calls using the IC. 
-void FullCodeGenerator::EmitCallWithIC(Call* expr) { +void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { Expression* callee = expr->expression(); - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); - CallFunctionFlags flags; + CallIC::CallType call_type = callee->IsVariableProxy() + ? CallIC::FUNCTION + : CallIC::METHOD; + // Get the target function. - if (callee->IsVariableProxy()) { + if (call_type == CallIC::FUNCTION) { { StackValueContext context(this); EmitVariableLoad(callee->AsVariableProxy()); PrepareForBailout(callee, NO_REGISTERS); @@ -2643,7 +2625,6 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) { // Push undefined as receiver. This is patched in the method prologue if it // is a sloppy mode method. __ Push(isolate()->factory()->undefined_value()); - flags = NO_CALL_FUNCTION_FLAGS; } else { // Load the function from the receiver. ASSERT(callee->IsProperty()); @@ -2654,40 +2635,19 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) { __ ldr(ip, MemOperand(sp, 0)); __ push(ip); __ str(r0, MemOperand(sp, kPointerSize)); - flags = CALL_AS_METHOD; } - // Load the arguments. - { PreservePositionScope scope(masm()->positions_recorder()); - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } - } - - // Record source position for debugger. - SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, flags); - __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); - - RecordJSReturnSite(expr); - - // Restore context register. - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, r0); + EmitCall(expr, call_type); } // Code common for calls using the IC. -void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, - Expression* key) { +void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, + Expression* key) { // Load the key. VisitForAccumulatorValue(key); Expression* callee = expr->expression(); - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); // Load the function from the receiver. ASSERT(callee->IsProperty()); @@ -2700,28 +2660,12 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, __ push(ip); __ str(r0, MemOperand(sp, kPointerSize)); - { PreservePositionScope scope(masm()->positions_recorder()); - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } - } - - // Record source position for debugger. - SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, CALL_AS_METHOD); - __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); - - RecordJSReturnSite(expr); - // Restore context register. - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, r0); + EmitCall(expr, CallIC::METHOD); } -void FullCodeGenerator::EmitCallWithStub(Call* expr) { - // Code common for calls using the call stub. +void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) { + // Load the arguments. ZoneList<Expression*>* args = expr->arguments(); int arg_count = args->length(); { PreservePositionScope scope(masm()->positions_recorder()); @@ -2729,19 +2673,17 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { VisitForStackValue(args->at(i)); } } - // Record source position for debugger. 
- SetSourcePosition(expr->position()); - Handle<Object> uninitialized = - TypeFeedbackInfo::UninitializedSentinel(isolate()); - StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); - __ Move(r2, FeedbackVector()); + // Record source position of the IC call. + SetSourcePosition(expr->position()); + Handle<Code> ic = CallIC::initialize_stub( + isolate(), arg_count, call_type); __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); - - // Record call targets in unoptimized code. - CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); + // Don't assign a type feedback id to the IC, since type feedback is provided + // by the vector above. + CallIC(ic); + RecordJSReturnSite(expr); // Restore context register. __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); @@ -2816,7 +2758,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { // Record source position for debugger. SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); + CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); __ CallStub(&stub); RecordJSReturnSite(expr); @@ -2824,7 +2766,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); context()->DropAndPlug(1, r0); } else if (call_type == Call::GLOBAL_CALL) { - EmitCallWithIC(expr); + EmitCallWithLoadIC(expr); } else if (call_type == Call::LOOKUP_SLOT_CALL) { // Call to a lookup slot (dynamically introduced variable). @@ -2864,16 +2806,16 @@ void FullCodeGenerator::VisitCall(Call* expr) { // The receiver is either the global receiver or an object found // by LoadContextSlot. - EmitCallWithStub(expr); + EmitCall(expr); } else if (call_type == Call::PROPERTY_CALL) { Property* property = callee->AsProperty(); { PreservePositionScope scope(masm()->positions_recorder()); VisitForStackValue(property->obj()); } if (property->key()->IsPropertyName()) { - EmitCallWithIC(expr); + EmitCallWithLoadIC(expr); } else { - EmitKeyedCallWithIC(expr, property->key()); + EmitKeyedCallWithLoadIC(expr, property->key()); } } else { ASSERT(call_type == Call::OTHER_CALL); @@ -2884,7 +2826,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); __ push(r1); // Emit function call. - EmitCallWithStub(expr); + EmitCall(expr); } #ifdef DEBUG @@ -2921,12 +2863,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); // Record call targets in unoptimized code. 
- Handle<Object> uninitialized = - TypeFeedbackInfo::UninitializedSentinel(isolate()); - StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); if (FLAG_pretenuring_call_new) { - StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), - isolate()->factory()->NewAllocationSite()); + EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); ASSERT(expr->AllocationSiteFeedbackSlot() == expr->CallNewFeedbackSlot() + 1); } @@ -2934,8 +2872,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { __ Move(r2, FeedbackVector()); __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); - CallConstructStub stub(RECORD_CALL_TARGET); - __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); + CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); + __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); PrepareForBailoutForId(expr->ReturnId(), TOS_REG); context()->Plug(r0); } @@ -3305,7 +3243,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) { VisitForAccumulatorValue(args->at(0)); __ mov(r1, r0); __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); - ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); + ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); __ CallStub(&stub); context()->Plug(r0); } @@ -3391,31 +3329,9 @@ void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { } -void FullCodeGenerator::EmitLog(CallRuntime* expr) { - // Conditionally generate a log call. - // Args: - // 0 (literal string): The type of logging (corresponds to the flags). - // This is used to determine whether or not to generate the log call. - // 1 (string): Format string. Access the string at argument index 2 - // with '%2s' (see Logger::LogRuntime for all the formats). - // 2 (array): Arguments to the format string. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT_EQ(args->length(), 3); - if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) { - VisitForStackValue(args->at(1)); - VisitForStackValue(args->at(2)); - __ CallRuntime(Runtime::kHiddenLog, 2); - } - - // Finally, we're expected to leave a value on the top of the stack. - __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); - context()->Plug(r0); -} - - void FullCodeGenerator::EmitSubString(CallRuntime* expr) { // Load the arguments on the stack and call the stub. - SubStringStub stub; + SubStringStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 3); VisitForStackValue(args->at(0)); @@ -3428,7 +3344,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) { void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { // Load the arguments on the stack and call the stub. - RegExpExecStub stub; + RegExpExecStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 4); VisitForStackValue(args->at(0)); @@ -3578,7 +3494,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { ASSERT(args->length() == 2); VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - MathPowStub stub(MathPowStub::ON_STACK); + MathPowStub stub(isolate(), MathPowStub::ON_STACK); __ CallStub(&stub); context()->Plug(r0); } @@ -3618,7 +3534,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { // Load the argument into r0 and call the stub. 
VisitForAccumulatorValue(args->at(0)); - NumberToStringStub stub; + NumberToStringStub stub(isolate()); __ CallStub(&stub); context()->Plug(r0); } @@ -3741,7 +3657,7 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { VisitForAccumulatorValue(args->at(1)); __ pop(r1); - StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED); + StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); __ CallStub(&stub); context()->Plug(r0); } @@ -3753,32 +3669,12 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - StringCompareStub stub; + StringCompareStub stub(isolate()); __ CallStub(&stub); context()->Plug(r0); } -void FullCodeGenerator::EmitMathLog(CallRuntime* expr) { - // Load the argument on the stack and call the runtime function. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT(args->length() == 1); - VisitForStackValue(args->at(0)); - __ CallRuntime(Runtime::kMath_log, 1); - context()->Plug(r0); -} - - -void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) { - // Load the argument on the stack and call the runtime function. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT(args->length() == 1); - VisitForStackValue(args->at(0)); - __ CallRuntime(Runtime::kMath_sqrt, 1); - context()->Plug(r0); -} - - void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() >= 2); @@ -3812,7 +3708,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { - RegExpConstructResultStub stub; + RegExpConstructResultStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 3); VisitForStackValue(args->at(0)); @@ -4178,7 +4074,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { // Record source position of the IC call. SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); + CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); __ CallStub(&stub); @@ -4310,7 +4206,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { - ASSERT(expr->expression()->IsValidLeftHandSide()); + ASSERT(expr->expression()->IsValidReferenceExpression()); Comment cmnt(masm_, "[ CountOperation"); SetSourcePosition(expr->position()); @@ -4396,7 +4292,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { __ jmp(&stub_call); __ bind(&slow); } - ToNumberStub convert_stub; + ToNumberStub convert_stub(isolate()); __ CallStub(&convert_stub); // Save result for postfix expressions. @@ -4427,8 +4323,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { // Record position before stub call. 
SetSourcePosition(expr->position()); - BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); - CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); + BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE); + CallIC(stub.GetCode(), expr->CountBinOpFeedbackId()); patch_site.EmitPatchInfo(); __ bind(&done); @@ -4539,13 +4435,14 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, } PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); - if (check->Equals(isolate()->heap()->number_string())) { + Factory* factory = isolate()->factory(); + if (String::Equals(check, factory->number_string())) { __ JumpIfSmi(r0, if_true); __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); __ cmp(r0, ip); Split(eq, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->string_string())) { + } else if (String::Equals(check, factory->string_string())) { __ JumpIfSmi(r0, if_false); // Check for undetectable objects => false. __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE); @@ -4553,20 +4450,20 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); __ tst(r1, Operand(1 << Map::kIsUndetectable)); Split(eq, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->symbol_string())) { + } else if (String::Equals(check, factory->symbol_string())) { __ JumpIfSmi(r0, if_false); __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE); Split(eq, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->boolean_string())) { + } else if (String::Equals(check, factory->boolean_string())) { __ CompareRoot(r0, Heap::kTrueValueRootIndex); __ b(eq, if_true); __ CompareRoot(r0, Heap::kFalseValueRootIndex); Split(eq, if_true, if_false, fall_through); } else if (FLAG_harmony_typeof && - check->Equals(isolate()->heap()->null_string())) { + String::Equals(check, factory->null_string())) { __ CompareRoot(r0, Heap::kNullValueRootIndex); Split(eq, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->undefined_string())) { + } else if (String::Equals(check, factory->undefined_string())) { __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); __ b(eq, if_true); __ JumpIfSmi(r0, if_false); @@ -4576,14 +4473,14 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ tst(r1, Operand(1 << Map::kIsUndetectable)); Split(ne, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->function_string())) { + } else if (String::Equals(check, factory->function_string())) { __ JumpIfSmi(r0, if_false); STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE); __ b(eq, if_true); __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE)); Split(eq, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->object_string())) { + } else if (String::Equals(check, factory->object_string())) { __ JumpIfSmi(r0, if_false); if (!FLAG_harmony_typeof) { __ CompareRoot(r0, Heap::kNullValueRootIndex); @@ -4636,7 +4533,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { case Token::INSTANCEOF: { VisitForStackValue(expr->right()); - InstanceofStub stub(InstanceofStub::kNoFlags); + InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); __ CallStub(&stub); PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); // The stub returns 0 for true. 
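The EmitLiteralCompareTypeof hunks above change only how the typeof literal is compared: the comparison now goes through the static, handle-based String::Equals with factory strings instead of calling Equals on the check string against heap()->number_string() and friends. The overall shape is unchanged, with the literal picking exactly one type check to emit. A rough sketch of that dispatch, with code generation replaced by a printout and the branch bodies paraphrased from the hunks above:

    #include <cstdio>
    #include <string>

    // The typeof literal selects which single type check gets emitted.
    void EmitTypeofCheck(const std::string& check) {
      if (check == "number") {
        std::printf("emit: smi check, then HeapNumber map check\n");
      } else if (check == "string") {
        std::printf("emit: instance type below FIRST_NONSTRING_TYPE, not undetectable\n");
      } else if (check == "symbol") {
        std::printf("emit: SYMBOL_TYPE instance check\n");
      } else if (check == "boolean") {
        std::printf("emit: compare against the true/false roots\n");
      } else if (check == "undefined") {
        std::printf("emit: undefined root or undetectable map bit\n");
      } else if (check == "function") {
        std::printf("emit: JS_FUNCTION_TYPE or JS_FUNCTION_PROXY_TYPE\n");
      } else if (check == "object") {
        std::printf("emit: spec-object range check, not undetectable\n");
      } else {
        std::printf("emit: unconditional jump to the false branch\n");
      }
    }

    int main() {
      EmitTypeofCheck("number");
      EmitTypeofCheck("no-such-type");
      return 0;
    }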
@@ -4780,7 +4677,8 @@ void FullCodeGenerator::EnterFinallyBlock() { ExternalReference has_pending_message = ExternalReference::address_of_has_pending_message(isolate()); __ mov(ip, Operand(has_pending_message)); - __ ldr(r1, MemOperand(ip)); + STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof) + __ ldrb(r1, MemOperand(ip)); __ SmiTag(r1); __ push(r1); @@ -4806,7 +4704,8 @@ void FullCodeGenerator::ExitFinallyBlock() { ExternalReference has_pending_message = ExternalReference::address_of_has_pending_message(isolate()); __ mov(ip, Operand(has_pending_message)); - __ str(r1, MemOperand(ip)); + STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof) + __ strb(r1, MemOperand(ip)); __ pop(r1); ExternalReference pending_message_obj = diff --git a/deps/v8/src/arm/ic-arm.cc b/deps/v8/src/arm/ic-arm.cc index 3d57105af..4626e3751 100644 --- a/deps/v8/src/arm/ic-arm.cc +++ b/deps/v8/src/arm/ic-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/arm/lithium-arm.cc b/deps/v8/src/arm/lithium-arm.cc index 55705b807..0c10a65c2 100644 --- a/deps/v8/src/arm/lithium-arm.cc +++ b/deps/v8/src/arm/lithium-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
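The finally-block fix above narrows the load/store of the has_pending_message flag from a word (ldr/str) to a byte (ldrb/strb), guarded by a STATIC_ASSERT that bool is one byte. A small, self-contained illustration of why the access width matters; plain C++, nothing V8-specific:

// Sketch only: a word-sized read of a one-byte flag also picks up the
// neighbouring bytes, which is what the ldr -> ldrb change avoids.
#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  static_assert(sizeof(bool) == 1, "flag is stored as a single byte");

  // The flag lives next to other data; only its own byte may be touched.
  unsigned char memory[4] = {1 /* flag */, 0xAA, 0xBB, 0xCC};

  // Byte-sized read: sees exactly the flag.
  bool flag;
  std::memcpy(&flag, &memory[0], sizeof(flag));
  assert(flag == true);

  // A word-sized read sees the neighbours too (0xCCBBAA01 on little-endian).
  std::uint32_t word;
  std::memcpy(&word, &memory[0], sizeof(word));
  assert(word != 1);
  return 0;
}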
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -623,6 +600,8 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, !hinstr->HasObservableSideEffects(); if (needs_environment && !instr->HasEnvironment()) { instr = AssignEnvironment(instr); + // We can't really figure out if the environment is needed or not. + instr->environment()->set_has_been_used(); } return instr; @@ -871,7 +850,8 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) { // the it was just a plain use), so it is free to move the split child into // the same register that is used for the use-at-start. // See https://code.google.com/p/chromium/issues/detail?id=201590 - if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) { + if (!(instr->ClobbersRegisters() && + instr->ClobbersDoubleRegisters(isolate()))) { int fixed = 0; int used_at_start = 0; for (UseIterator it(instr); !it.Done(); it.Advance()) { @@ -931,18 +911,20 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) { if (goto_instr != NULL) return goto_instr; HValue* value = instr->value(); - LBranch* result = new(zone()) LBranch(UseRegister(value)); - // Tagged values that are not known smis or booleans require a - // deoptimization environment. If the instruction is generic no - // environment is needed since all cases are handled. - Representation rep = value->representation(); + Representation r = value->representation(); HType type = value->type(); ToBooleanStub::Types expected = instr->expected_input_types(); - if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean() && - !expected.IsGeneric()) { - return AssignEnvironment(result); + if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); + + bool easy_case = !r.IsTagged() || type.IsBoolean() || type.IsSmi() || + type.IsJSArray() || type.IsHeapNumber() || type.IsString(); + LInstruction* branch = new(zone()) LBranch(UseRegister(value)); + if (!easy_case && + ((!expected.Contains(ToBooleanStub::SMI) && expected.NeedsMap()) || + !expected.IsGeneric())) { + branch = AssignEnvironment(branch); } - return result; + return branch; } @@ -1138,8 +1120,11 @@ LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) { ? 
NULL : UseFixed(instr->context(), cp); LOperand* input = UseRegister(instr->value()); - LMathAbs* result = new(zone()) LMathAbs(context, input); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + LInstruction* result = + DefineAsRegister(new(zone()) LMathAbs(context, input)); + if (!r.IsDouble() && !r.IsSmiOrInteger32()) result = AssignPointerMap(result); + if (!r.IsDouble()) result = AssignEnvironment(result); + return result; } @@ -1284,15 +1269,25 @@ LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) { } -LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) { +LInstruction* LChunkBuilder::DoDivI(HDiv* instr) { ASSERT(instr->representation().IsSmiOrInteger32()); ASSERT(instr->left()->representation().Equals(instr->representation())); ASSERT(instr->right()->representation().Equals(instr->representation())); LOperand* dividend = UseRegister(instr->left()); LOperand* divisor = UseRegister(instr->right()); LOperand* temp = CpuFeatures::IsSupported(SUDIV) ? NULL : FixedTemp(d4); - LDivI* div = new(zone()) LDivI(dividend, divisor, temp); - return AssignEnvironment(DefineAsRegister(div)); + LInstruction* result = + DefineAsRegister(new(zone()) LDivI(dividend, divisor, temp)); + if (instr->CheckFlag(HValue::kCanBeDivByZero) || + instr->CheckFlag(HValue::kBailoutOnMinusZero) || + (instr->CheckFlag(HValue::kCanOverflow) && + (!CpuFeatures::IsSupported(SUDIV) || + !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) || + (!instr->IsMathFloorOfDiv() && + !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) { + result = AssignEnvironment(result); + } + return result; } @@ -1346,13 +1341,25 @@ LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) { } +LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) { + ASSERT(instr->representation().IsSmiOrInteger32()); + ASSERT(instr->left()->representation().Equals(instr->representation())); + ASSERT(instr->right()->representation().Equals(instr->representation())); + LOperand* dividend = UseRegister(instr->left()); + LOperand* divisor = UseRegister(instr->right()); + LOperand* temp = CpuFeatures::IsSupported(SUDIV) ? 
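The rewritten DoDivI no longer assigns a deoptimization environment unconditionally; it does so only when the division can actually deoptimize. The conditions from the hunk, restated as a standalone predicate (the struct and field names are stand-ins for the hydrogen flags, not real V8 types):

// Sketch only: a boolean restatement of the AssignEnvironment condition in
// the new DoDivI above.
struct DivProperties {
  bool can_be_div_by_zero;
  bool bailout_on_minus_zero;
  bool can_overflow;                    // kMinInt / -1
  bool all_uses_truncating_to_int32;
  bool is_math_floor_of_div;
  bool cpu_has_sudiv;
};

bool DivNeedsEnvironment(const DivProperties& p) {
  return p.can_be_div_by_zero ||
         p.bailout_on_minus_zero ||
         (p.can_overflow &&
          (!p.cpu_has_sudiv || !p.all_uses_truncating_to_int32)) ||
         (!p.is_math_floor_of_div && !p.all_uses_truncating_to_int32);
}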
NULL : FixedTemp(d4); + LFlooringDivI* div = new(zone()) LFlooringDivI(dividend, divisor, temp); + return AssignEnvironment(DefineAsRegister(div)); +} + + LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) { if (instr->RightIsPowerOf2()) { return DoFlooringDivByPowerOf2I(instr); } else if (instr->right()->IsConstant()) { return DoFlooringDivByConstI(instr); } else { - return DoDivI(instr); + return DoFlooringDivI(instr); } } @@ -1647,6 +1654,8 @@ LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) { LInstruction* LChunkBuilder::DoCompareNumericAndBranch( HCompareNumericAndBranch* instr) { + LInstruction* goto_instr = CheckElideControlInstruction(instr); + if (goto_instr != NULL) return goto_instr; Representation r = instr->representation(); if (r.IsSmiOrInteger32()) { ASSERT(instr->left()->representation().Equals(r)); @@ -1801,9 +1810,16 @@ LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) { LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) { - LOperand* value = UseRegisterOrConstantAtStart(instr->index()); - LOperand* length = UseRegister(instr->length()); - return AssignEnvironment(new(zone()) LBoundsCheck(value, length)); + if (!FLAG_debug_code && instr->skip_check()) return NULL; + LOperand* index = UseRegisterOrConstantAtStart(instr->index()); + LOperand* length = !index->IsConstantOperand() + ? UseRegisterOrConstantAtStart(instr->length()) + : UseRegisterAtStart(instr->length()); + LInstruction* result = new(zone()) LBoundsCheck(index, length); + if (!FLAG_debug_code || !instr->skip_check()) { + result = AssignEnvironment(result); + } + return result; } @@ -1837,20 +1853,21 @@ LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) { LInstruction* LChunkBuilder::DoChange(HChange* instr) { Representation from = instr->from(); Representation to = instr->to(); + HValue* val = instr->value(); if (from.IsSmi()) { if (to.IsTagged()) { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); return DefineSameAsFirst(new(zone()) LDummyUse(value)); } from = Representation::Tagged(); } if (from.IsTagged()) { if (to.IsDouble()) { - LOperand* value = UseRegister(instr->value()); - LNumberUntagD* res = new(zone()) LNumberUntagD(value); - return AssignEnvironment(DefineAsRegister(res)); + LOperand* value = UseRegister(val); + LInstruction* result = DefineAsRegister(new(zone()) LNumberUntagD(value)); + if (!val->representation().IsSmi()) result = AssignEnvironment(result); + return result; } else if (to.IsSmi()) { - HValue* val = instr->value(); LOperand* value = UseRegister(val); if (val->type().IsSmi()) { return DefineSameAsFirst(new(zone()) LDummyUse(value)); @@ -1858,66 +1875,59 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value))); } else { ASSERT(to.IsInteger32()); - LOperand* value = NULL; - LInstruction* res = NULL; - HValue* val = instr->value(); if (val->type().IsSmi() || val->representation().IsSmi()) { - value = UseRegisterAtStart(val); - res = DefineAsRegister(new(zone()) LSmiUntag(value, false)); + LOperand* value = UseRegisterAtStart(val); + return DefineAsRegister(new(zone()) LSmiUntag(value, false)); } else { - value = UseRegister(val); + LOperand* value = UseRegister(val); LOperand* temp1 = TempRegister(); LOperand* temp2 = FixedTemp(d11); - res = DefineSameAsFirst(new(zone()) LTaggedToI(value, - temp1, - temp2)); - res = AssignEnvironment(res); + LInstruction* result = + 
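DoMathFloorOfDiv now dispatches to three lowering strategies, with the generic case going to the new LFlooringDivI instead of reusing LDivI. A sketch of that dispatch in plain C++; the enum and helper names are hypothetical:

// Sketch only: the strength-reduction choice made for Math.floor(a / b).
#include <cstdint>

static bool IsPowerOf2(std::int32_t x) {
  std::uint32_t u = static_cast<std::uint32_t>(x);
  if (x < 0) u = 0u - u;  // unsigned negation avoids UB on kMinInt
  return u != 0 && (u & (u - 1)) == 0;
}

enum class FlooringDivStrategy { kByPowerOf2, kByConstant, kGeneric };

FlooringDivStrategy ChooseFlooringDiv(bool right_is_constant,
                                      std::int32_t right_if_constant) {
  if (right_is_constant && IsPowerOf2(right_if_constant)) {
    return FlooringDivStrategy::kByPowerOf2;  // shift-based path
  }
  if (right_is_constant) {
    return FlooringDivStrategy::kByConstant;  // multiply-by-reciprocal path
  }
  return FlooringDivStrategy::kGeneric;       // new LFlooringDivI instruction
}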
DefineSameAsFirst(new(zone()) LTaggedToI(value, temp1, temp2)); + if (!val->representation().IsSmi()) result = AssignEnvironment(result); + return result; } - return res; } } else if (from.IsDouble()) { if (to.IsTagged()) { info()->MarkAsDeferredCalling(); - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); - - // Make sure that the temp and result_temp registers are - // different. LUnallocated* result_temp = TempRegister(); LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2); - Define(result, result_temp); - return AssignPointerMap(result); + return AssignPointerMap(Define(result, result_temp)); } else if (to.IsSmi()) { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); return AssignEnvironment( DefineAsRegister(new(zone()) LDoubleToSmi(value))); } else { ASSERT(to.IsInteger32()); - LOperand* value = UseRegister(instr->value()); - LDoubleToI* res = new(zone()) LDoubleToI(value); - return AssignEnvironment(DefineAsRegister(res)); + LOperand* value = UseRegister(val); + LInstruction* result = DefineAsRegister(new(zone()) LDoubleToI(value)); + if (!instr->CanTruncateToInt32()) result = AssignEnvironment(result); + return result; } } else if (from.IsInteger32()) { info()->MarkAsDeferredCalling(); if (to.IsTagged()) { - HValue* val = instr->value(); - LOperand* value = UseRegisterAtStart(val); if (!instr->CheckFlag(HValue::kCanOverflow)) { + LOperand* value = UseRegisterAtStart(val); return DefineAsRegister(new(zone()) LSmiTag(value)); } else if (val->CheckFlag(HInstruction::kUint32)) { + LOperand* value = UseRegisterAtStart(val); LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + return AssignPointerMap(DefineAsRegister(result)); } else { + LOperand* value = UseRegisterAtStart(val); LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); LNumberTagI* result = new(zone()) LNumberTagI(value, temp1, temp2); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + return AssignPointerMap(DefineAsRegister(result)); } } else if (to.IsSmi()) { - HValue* val = instr->value(); LOperand* value = UseRegister(val); LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value)); if (instr->CheckFlag(HValue::kCanOverflow)) { @@ -1926,12 +1936,10 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { return result; } else { ASSERT(to.IsDouble()); - if (instr->value()->CheckFlag(HInstruction::kUint32)) { - return DefineAsRegister( - new(zone()) LUint32ToDouble(UseRegister(instr->value()))); + if (val->CheckFlag(HInstruction::kUint32)) { + return DefineAsRegister(new(zone()) LUint32ToDouble(UseRegister(val))); } else { - return DefineAsRegister( - new(zone()) LInteger32ToDouble(Use(instr->value()))); + return DefineAsRegister(new(zone()) LInteger32ToDouble(Use(val))); } } } @@ -1942,7 +1950,9 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) { LOperand* value = UseRegisterAtStart(instr->value()); - return AssignEnvironment(new(zone()) LCheckNonSmi(value)); + LInstruction* result = new(zone()) LCheckNonSmi(value); + if (!instr->value()->IsHeapObject()) result = AssignEnvironment(result); + return result; } @@ -1966,15 +1976,12 @@ LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* 
instr) { LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) { - LOperand* value = NULL; - if (!instr->CanOmitMapChecks()) { - value = UseRegisterAtStart(instr->value()); - if (instr->has_migration_target()) info()->MarkAsDeferredCalling(); - } - LCheckMaps* result = new(zone()) LCheckMaps(value); - if (!instr->CanOmitMapChecks()) { - AssignEnvironment(result); - if (instr->has_migration_target()) return AssignPointerMap(result); + if (instr->IsStabilityCheck()) return new(zone()) LCheckMaps; + LOperand* value = UseRegisterAtStart(instr->value()); + LInstruction* result = AssignEnvironment(new(zone()) LCheckMaps(value)); + if (instr->HasMigrationTarget()) { + info()->MarkAsDeferredCalling(); + result = AssignPointerMap(result); } return result; } @@ -2072,7 +2079,10 @@ LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) { LOperand* context = UseRegisterAtStart(instr->value()); LInstruction* result = DefineAsRegister(new(zone()) LLoadContextSlot(context)); - return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result; + if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) { + result = AssignEnvironment(result); + } + return result; } @@ -2087,7 +2097,10 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) { value = UseRegister(instr->value()); } LInstruction* result = new(zone()) LStoreContextSlot(context, value); - return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result; + if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) { + result = AssignEnvironment(result); + } + return result; } @@ -2122,7 +2135,7 @@ LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) { ASSERT(instr->key()->representation().IsSmiOrInteger32()); ElementsKind elements_kind = instr->elements_kind(); LOperand* key = UseRegisterOrConstantAtStart(instr->key()); - LLoadKeyed* result = NULL; + LInstruction* result = NULL; if (!instr->is_typed_elements()) { LOperand* obj = NULL; @@ -2132,24 +2145,28 @@ LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) { ASSERT(instr->representation().IsSmiOrTagged()); obj = UseRegisterAtStart(instr->elements()); } - result = new(zone()) LLoadKeyed(obj, key); + result = DefineAsRegister(new(zone()) LLoadKeyed(obj, key)); } else { ASSERT( (instr->representation().IsInteger32() && - !IsDoubleOrFloatElementsKind(instr->elements_kind())) || + !IsDoubleOrFloatElementsKind(elements_kind)) || (instr->representation().IsDouble() && - IsDoubleOrFloatElementsKind(instr->elements_kind()))); + IsDoubleOrFloatElementsKind(elements_kind))); LOperand* backing_store = UseRegister(instr->elements()); - result = new(zone()) LLoadKeyed(backing_store, key); + result = DefineAsRegister(new(zone()) LLoadKeyed(backing_store, key)); } - DefineAsRegister(result); - // An unsigned int array load might overflow and cause a deopt, make sure it - // has an environment. - bool can_deoptimize = instr->RequiresHoleCheck() || - elements_kind == EXTERNAL_UINT32_ELEMENTS || - elements_kind == UINT32_ELEMENTS; - return can_deoptimize ? AssignEnvironment(result) : result; + if ((instr->is_external() || instr->is_fixed_typed_array()) ? 
+ // see LCodeGen::DoLoadKeyedExternalArray + ((elements_kind == EXTERNAL_UINT32_ELEMENTS || + elements_kind == UINT32_ELEMENTS) && + !instr->CheckFlag(HInstruction::kUint32)) : + // see LCodeGen::DoLoadKeyedFixedDoubleArray and + // LCodeGen::DoLoadKeyedFixedArray + instr->RequiresHoleCheck()) { + result = AssignEnvironment(result); + } + return result; } @@ -2225,17 +2242,18 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) { LInstruction* LChunkBuilder::DoTransitionElementsKind( HTransitionElementsKind* instr) { - LOperand* object = UseRegister(instr->object()); if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) { + LOperand* object = UseRegister(instr->object()); LOperand* new_map_reg = TempRegister(); LTransitionElementsKind* result = new(zone()) LTransitionElementsKind(object, NULL, new_map_reg); return result; } else { + LOperand* object = UseFixed(instr->object(), r0); LOperand* context = UseFixed(instr->context(), cp); LTransitionElementsKind* result = new(zone()) LTransitionElementsKind(object, context, NULL); - return AssignPointerMap(result); + return MarkAsCall(result, instr); } } @@ -2279,11 +2297,11 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) { // We need a temporary register for write barrier of the map field. LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL; - LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp); - if (instr->field_representation().IsHeapObject()) { - if (!instr->value()->type().IsHeapObject()) { - return AssignEnvironment(result); - } + LInstruction* result = new(zone()) LStoreNamedField(obj, val, temp); + if (!instr->access().IsExternalMemory() && + instr->field_representation().IsHeapObject() && + !instr->value()->type().IsHeapObject()) { + result = AssignEnvironment(result); } return result; } @@ -2315,7 +2333,7 @@ LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) { LOperand* context = UseAny(instr->context()); LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(context, string, index); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + return AssignPointerMap(DefineAsRegister(result)); } @@ -2371,7 +2389,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) { } else { ASSERT(info()->IsStub()); CodeStubInterfaceDescriptor* descriptor = - info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); + info()->code_stub()->GetInterfaceDescriptor(); int index = static_cast<int>(instr->index()); Register reg = descriptor->GetParameterRegister(index); return DefineFixed(result, reg); @@ -2478,6 +2496,7 @@ LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { HEnvironment* outer = current_block_->last_environment(); + outer->set_ast_id(instr->ReturnId()); HConstant* undefined = graph()->GetConstantUndefined(); HEnvironment* inner = outer->CopyForInlining(instr->closure(), instr->arguments_count(), @@ -2538,7 +2557,9 @@ LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) { LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { LOperand* object = UseRegister(instr->object()); LOperand* index = UseRegister(instr->index()); - return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); + LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index); + LInstruction* result = DefineSameAsFirst(load); + return AssignPointerMap(result); } } } // 
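The deopt condition for keyed loads is now spelled out explicitly: external or fixed typed-array loads of UINT32 elements need an environment unless the value is itself used as uint32. A small standalone example of why such an element may not fit a signed 32-bit representation:

// Sketch only: the representability problem behind the UINT32_ELEMENTS case.
#include <cstdint>
#include <iostream>
#include <limits>

int main() {
  const std::uint32_t element = 0x90000000u;  // plausible typed-array value
  const bool fits_int32 =
      element <= static_cast<std::uint32_t>(
                     std::numeric_limits<std::int32_t>::max());
  std::cout << "element " << element
            << (fits_int32 ? " fits" : " does not fit")
            << " in a signed 32-bit representation\n";
  // When it does not fit, the optimized code must either treat the value as
  // uint32 throughout (the kUint32 flag case) or bail out to a heap number.
  return 0;
}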
namespace v8::internal diff --git a/deps/v8/src/arm/lithium-arm.h b/deps/v8/src/arm/lithium-arm.h index 34eb51017..1a90eb638 100644 --- a/deps/v8/src/arm/lithium-arm.h +++ b/deps/v8/src/arm/lithium-arm.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM_LITHIUM_ARM_H_ #define V8_ARM_LITHIUM_ARM_H_ @@ -97,6 +74,7 @@ class LCodeGen; V(DummyUse) \ V(FlooringDivByConstI) \ V(FlooringDivByPowerOf2I) \ + V(FlooringDivI) \ V(ForInCacheArray) \ V(ForInPrepareMap) \ V(FunctionLiteral) \ @@ -261,7 +239,9 @@ class LInstruction : public ZoneObject { // Interface to the register allocator and iterators. bool ClobbersTemps() const { return IsCall(); } bool ClobbersRegisters() const { return IsCall(); } - virtual bool ClobbersDoubleRegisters() const { return IsCall(); } + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const { + return IsCall(); + } // Interface to the register allocator and iterators. 
bool IsMarkedAsCall() const { return IsCall(); } @@ -713,14 +693,14 @@ class LDivByConstI V8_FINAL : public LTemplateInstruction<1, 1, 0> { class LDivI V8_FINAL : public LTemplateInstruction<1, 2, 1> { public: - LDivI(LOperand* left, LOperand* right, LOperand* temp) { - inputs_[0] = left; - inputs_[1] = right; + LDivI(LOperand* dividend, LOperand* divisor, LOperand* temp) { + inputs_[0] = dividend; + inputs_[1] = divisor; temps_[0] = temp; } - LOperand* left() { return inputs_[0]; } - LOperand* right() { return inputs_[1]; } + LOperand* dividend() { return inputs_[0]; } + LOperand* divisor() { return inputs_[1]; } LOperand* temp() { return temps_[0]; } DECLARE_CONCRETE_INSTRUCTION(DivI, "div-i") @@ -767,6 +747,23 @@ class LFlooringDivByConstI V8_FINAL : public LTemplateInstruction<1, 1, 2> { }; +class LFlooringDivI V8_FINAL : public LTemplateInstruction<1, 2, 1> { + public: + LFlooringDivI(LOperand* dividend, LOperand* divisor, LOperand* temp) { + inputs_[0] = dividend; + inputs_[1] = divisor; + temps_[0] = temp; + } + + LOperand* dividend() { return inputs_[0]; } + LOperand* divisor() { return inputs_[1]; } + LOperand* temp() { return temps_[0]; } + + DECLARE_CONCRETE_INSTRUCTION(FlooringDivI, "flooring-div-i") + DECLARE_HYDROGEN_ACCESSOR(MathFloorOfDiv) +}; + + class LMulI V8_FINAL : public LTemplateInstruction<1, 2, 0> { public: LMulI(LOperand* left, LOperand* right) { @@ -1968,7 +1965,7 @@ class LCallRuntime V8_FINAL : public LTemplateInstruction<1, 1, 0> { DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime") DECLARE_HYDROGEN_ACCESSOR(CallRuntime) - virtual bool ClobbersDoubleRegisters() const V8_OVERRIDE { + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const V8_OVERRIDE { return save_doubles() == kDontSaveFPRegs; } @@ -2164,7 +2161,6 @@ class LStoreNamedField V8_FINAL : public LTemplateInstruction<0, 2, 1> { virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE; - Handle<Map> transition() const { return hydrogen()->transition_map(); } Representation representation() const { return hydrogen()->field_representation(); } @@ -2379,7 +2375,7 @@ class LCheckInstanceType V8_FINAL : public LTemplateInstruction<0, 1, 0> { class LCheckMaps V8_FINAL : public LTemplateInstruction<0, 1, 0> { public: - explicit LCheckMaps(LOperand* value) { + explicit LCheckMaps(LOperand* value = NULL) { inputs_[0] = value; } @@ -2696,6 +2692,8 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase { next_block_(NULL), allocator_(allocator) { } + Isolate* isolate() const { return graph_->isolate(); } + // Build the sequence for the graph. LPlatformChunk* Build(); @@ -2722,12 +2720,13 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase { LInstruction* DoMathClz32(HUnaryMathOperation* instr); LInstruction* DoDivByPowerOf2I(HDiv* instr); LInstruction* DoDivByConstI(HDiv* instr); - LInstruction* DoDivI(HBinaryOperation* instr); + LInstruction* DoDivI(HDiv* instr); LInstruction* DoModByPowerOf2I(HMod* instr); LInstruction* DoModByConstI(HMod* instr); LInstruction* DoModI(HMod* instr); LInstruction* DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr); LInstruction* DoFlooringDivByConstI(HMathFloorOfDiv* instr); + LInstruction* DoFlooringDivI(HMathFloorOfDiv* instr); private: enum Status { diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc index 7152ba21c..5a01d3bc8 100644 --- a/deps/v8/src/arm/lithium-codegen-arm.cc +++ b/deps/v8/src/arm/lithium-codegen-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -86,13 +63,6 @@ void LCodeGen::FinishCode(Handle<Code> code) { code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); PopulateDeoptimizationData(code); - info()->CommitDependencies(code); -} - - -void LCodeGen::Abort(BailoutReason reason) { - info()->set_bailout_reason(reason); - status_ = ABORTED; } @@ -207,7 +177,7 @@ bool LCodeGen::GeneratePrologue() { Comment(";;; Allocate local context"); // Argument to NewContext is the function, which is in r1. if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); + FastNewContextStub stub(isolate(), heap_slots); __ CallStub(&stub); } else { __ push(r1); @@ -714,6 +684,16 @@ void LCodeGen::AddToTranslation(LEnvironment* environment, } +int LCodeGen::CallCodeSize(Handle<Code> code, RelocInfo::Mode mode) { + int size = masm()->CallSize(code, mode); + if (code->kind() == Code::BINARY_OP_IC || + code->kind() == Code::COMPARE_IC) { + size += Assembler::kInstrSize; // extra nop() added in CallCodeGeneric. + } + return size; +} + + void LCodeGen::CallCode(Handle<Code> code, RelocInfo::Mode mode, LInstruction* instr, @@ -783,6 +763,7 @@ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, Safepoint::DeoptMode mode) { + environment->set_has_been_used(); if (!environment->HasBeenRegistered()) { // Physical stack frame layout: // -x ............. -4 0 ..................................... 
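The new CallCodeSize helper measures a call site, adding one instruction for the nop that CallCodeGeneric emits after binary-op and compare IC calls. Restated with plain integers; CodeKind and the base-size parameter are stand-ins for the assembler query:

// Sketch only: the size accounting done by LCodeGen::CallCodeSize above.
constexpr int kInstrSize = 4;  // ARM instructions are 4 bytes

enum class CodeKind { kBinaryOpIC, kCompareIC, kOther };

int CallCodeSize(int base_call_size, CodeKind kind) {
  int size = base_call_size;
  if (kind == CodeKind::kBinaryOpIC || kind == CodeKind::kCompareIC) {
    size += kInstrSize;  // CallCodeGeneric appends a nop used for IC patching
  }
  return size;
}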
y @@ -906,7 +887,7 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { int length = deoptimizations_.length(); if (length == 0) return; Handle<DeoptimizationInputData> data = - factory()->NewDeoptimizationInputData(length, TENURED); + DeoptimizationInputData::New(isolate(), length, TENURED); Handle<ByteArray> translations = translations_.CreateByteArray(isolate()->factory()); @@ -1095,18 +1076,18 @@ void LCodeGen::DoCallStub(LCallStub* instr) { ASSERT(ToRegister(instr->result()).is(r0)); switch (instr->hydrogen()->major_key()) { case CodeStub::RegExpExec: { - RegExpExecStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + RegExpExecStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } case CodeStub::SubString: { - SubStringStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + SubStringStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } case CodeStub::StringCompare: { - StringCompareStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + StringCompareStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } default: @@ -1293,7 +1274,7 @@ void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { Register dividend = ToRegister(instr->dividend()); int32_t divisor = instr->divisor(); Register result = ToRegister(instr->result()); - ASSERT(divisor == kMinInt || (divisor != 0 && IsPowerOf2(Abs(divisor)))); + ASSERT(divisor == kMinInt || IsPowerOf2(Abs(divisor))); ASSERT(!result.is(dividend)); // Check for (0 / -x) that will produce negative zero. @@ -1363,15 +1344,16 @@ void LCodeGen::DoDivByConstI(LDivByConstI* instr) { } +// TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. void LCodeGen::DoDivI(LDivI* instr) { HBinaryOperation* hdiv = instr->hydrogen(); - Register left = ToRegister(instr->left()); - Register right = ToRegister(instr->right()); + Register dividend = ToRegister(instr->dividend()); + Register divisor = ToRegister(instr->divisor()); Register result = ToRegister(instr->result()); // Check for x / 0. if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { - __ cmp(right, Operand::Zero()); + __ cmp(divisor, Operand::Zero()); DeoptimizeIf(eq, instr->environment()); } @@ -1380,10 +1362,10 @@ void LCodeGen::DoDivI(LDivI* instr) { Label positive; if (!instr->hydrogen_value()->CheckFlag(HValue::kCanBeDivByZero)) { // Do the test only if it hadn't be done above. - __ cmp(right, Operand::Zero()); + __ cmp(divisor, Operand::Zero()); } __ b(pl, &positive); - __ cmp(left, Operand::Zero()); + __ cmp(dividend, Operand::Zero()); DeoptimizeIf(eq, instr->environment()); __ bind(&positive); } @@ -1394,39 +1376,30 @@ void LCodeGen::DoDivI(LDivI* instr) { !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32))) { // We don't need to check for overflow when truncating with sdiv // support because, on ARM, sdiv kMinInt, -1 -> kMinInt. 
- __ cmp(left, Operand(kMinInt)); - __ cmp(right, Operand(-1), eq); + __ cmp(dividend, Operand(kMinInt)); + __ cmp(divisor, Operand(-1), eq); DeoptimizeIf(eq, instr->environment()); } if (CpuFeatures::IsSupported(SUDIV)) { CpuFeatureScope scope(masm(), SUDIV); - __ sdiv(result, left, right); + __ sdiv(result, dividend, divisor); } else { DoubleRegister vleft = ToDoubleRegister(instr->temp()); DoubleRegister vright = double_scratch0(); - __ vmov(double_scratch0().low(), left); + __ vmov(double_scratch0().low(), dividend); __ vcvt_f64_s32(vleft, double_scratch0().low()); - __ vmov(double_scratch0().low(), right); + __ vmov(double_scratch0().low(), divisor); __ vcvt_f64_s32(vright, double_scratch0().low()); __ vdiv(vleft, vleft, vright); // vleft now contains the result. __ vcvt_s32_f64(double_scratch0().low(), vleft); __ vmov(result, double_scratch0().low()); } - if (hdiv->IsMathFloorOfDiv()) { - Label done; - Register remainder = scratch0(); - __ mls(remainder, result, right, left); - __ cmp(remainder, Operand::Zero()); - __ b(eq, &done); - __ eor(remainder, remainder, Operand(right)); - __ add(result, result, Operand(remainder, ASR, 31)); - __ bind(&done); - } else if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { + if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { // Compute remainder and deopt if it's not zero. Register remainder = scratch0(); - __ mls(remainder, result, right, left); + __ mls(remainder, result, divisor, dividend); __ cmp(remainder, Operand::Zero()); DeoptimizeIf(ne, instr->environment()); } @@ -1476,19 +1449,21 @@ void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { DeoptimizeIf(eq, instr->environment()); } - if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { - // Note that we could emit branch-free code, but that would need one more - // register. - if (divisor == -1) { - DeoptimizeIf(vs, instr->environment()); - __ mov(result, Operand(dividend, ASR, shift)); - } else { - __ mov(result, Operand(kMinInt / divisor), LeaveCC, vs); - __ mov(result, Operand(dividend, ASR, shift), LeaveCC, vc); - } - } else { + + // If the negation could not overflow, simply shifting is OK. + if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { __ mov(result, Operand(dividend, ASR, shift)); + return; + } + + // Dividing by -1 is basically negation, unless we overflow. + if (divisor == -1) { + DeoptimizeIf(vs, instr->environment()); + return; } + + __ mov(result, Operand(kMinInt / divisor), LeaveCC, vs); + __ mov(result, Operand(dividend, ASR, shift), LeaveCC, vc); } @@ -1538,6 +1513,69 @@ void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { } +// TODO(svenpanne) Refactor this to avoid code duplication with DoDivI. +void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) { + HBinaryOperation* hdiv = instr->hydrogen(); + Register left = ToRegister(instr->dividend()); + Register right = ToRegister(instr->divisor()); + Register result = ToRegister(instr->result()); + + // Check for x / 0. + if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { + __ cmp(right, Operand::Zero()); + DeoptimizeIf(eq, instr->environment()); + } + + // Check for (0 / -x) that will produce negative zero. + if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { + Label positive; + if (!instr->hydrogen_value()->CheckFlag(HValue::kCanBeDivByZero)) { + // Do the test only if it hadn't be done above. 
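The restructured DoFlooringDivByPowerOf2I keeps a single arithmetic shift as its fast path. A short demonstration that an arithmetic right shift really is flooring division by a power of two; this assumes a two's-complement target where >> on negative values shifts arithmetically, as on ARM:

// Sketch only: ASR as floor division by 2^shift.
#include <cassert>

int FlooringDivByPowerOf2(int dividend, int shift) {
  return dividend >> shift;  // arithmetic shift on two's-complement targets
}

int main() {
  assert(FlooringDivByPowerOf2(7, 1) == 3);    // floor(3.5)
  assert(FlooringDivByPowerOf2(-7, 1) == -4);  // floor(-3.5), not -3
  assert(FlooringDivByPowerOf2(-8, 2) == -2);
  return 0;
}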
+ __ cmp(right, Operand::Zero()); + } + __ b(pl, &positive); + __ cmp(left, Operand::Zero()); + DeoptimizeIf(eq, instr->environment()); + __ bind(&positive); + } + + // Check for (kMinInt / -1). + if (hdiv->CheckFlag(HValue::kCanOverflow) && + (!CpuFeatures::IsSupported(SUDIV) || + !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32))) { + // We don't need to check for overflow when truncating with sdiv + // support because, on ARM, sdiv kMinInt, -1 -> kMinInt. + __ cmp(left, Operand(kMinInt)); + __ cmp(right, Operand(-1), eq); + DeoptimizeIf(eq, instr->environment()); + } + + if (CpuFeatures::IsSupported(SUDIV)) { + CpuFeatureScope scope(masm(), SUDIV); + __ sdiv(result, left, right); + } else { + DoubleRegister vleft = ToDoubleRegister(instr->temp()); + DoubleRegister vright = double_scratch0(); + __ vmov(double_scratch0().low(), left); + __ vcvt_f64_s32(vleft, double_scratch0().low()); + __ vmov(double_scratch0().low(), right); + __ vcvt_f64_s32(vright, double_scratch0().low()); + __ vdiv(vleft, vleft, vright); // vleft now contains the result. + __ vcvt_s32_f64(double_scratch0().low(), vleft); + __ vmov(result, double_scratch0().low()); + } + + Label done; + Register remainder = scratch0(); + __ mls(remainder, result, right, left); + __ cmp(remainder, Operand::Zero()); + __ b(eq, &done); + __ eor(remainder, remainder, Operand(right)); + __ add(result, result, Operand(remainder, ASR, 31)); + __ bind(&done); +} + + void LCodeGen::DoMulI(LMulI* instr) { Register result = ToRegister(instr->result()); // Note that result may alias left. @@ -1835,9 +1873,16 @@ void LCodeGen::DoConstantE(LConstantE* instr) { void LCodeGen::DoConstantT(LConstantT* instr) { - Handle<Object> value = instr->value(isolate()); + Handle<Object> object = instr->value(isolate()); AllowDeferredHandleDereference smi_check; - __ Move(ToRegister(instr->result()), value); + if (instr->hydrogen()->HasObjectMap()) { + Handle<Map> object_map = instr->hydrogen()->ObjectMap().handle(); + ASSERT(object->IsHeapObject()); + ASSERT(!object_map->is_stable() || + *object_map == Handle<HeapObject>::cast(object)->map()); + USE(object_map); + } + __ Move(ToRegister(instr->result()), object); } @@ -2091,11 +2136,11 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) { ASSERT(ToRegister(instr->right()).is(r0)); ASSERT(ToRegister(instr->result()).is(r0)); - BinaryOpICStub stub(instr->op(), NO_OVERWRITE); + BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); // Block literal pool emission to ensure nop indicating no inlined smi code // is in the correct position. Assembler::BlockConstPoolScope block_const_pool(masm()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -2686,8 +2731,8 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) { ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0. ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1. 
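The tail of the new DoFlooringDivI turns a truncating quotient into a flooring one without branches: compute the remainder with mls, then add (remainder ^ divisor) >> 31 when the remainder is non-zero. The same adjustment in C++, again assuming an arithmetic right shift of negative values:

// Sketch only: the eor / ASR #31 floor adjustment from the emitted code.
#include <cassert>
#include <cstdint>

std::int32_t FlooringDiv(std::int32_t dividend, std::int32_t divisor) {
  std::int32_t result = dividend / divisor;               // truncating division
  std::int32_t remainder = dividend - result * divisor;   // the mls step
  if (remainder != 0) {
    // Signs differ exactly when truncation rounded the wrong way: the xor's
    // sign bit, shifted down, yields -1 in that case and 0 otherwise.
    result += ((remainder ^ divisor) >> 31);
  }
  return result;
}

int main() {
  assert(FlooringDiv(7, 2) == 3);
  assert(FlooringDiv(-7, 2) == -4);
  assert(FlooringDiv(7, -2) == -4);
  assert(FlooringDiv(-7, -2) == 3);
  return 0;
}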
- InstanceofStub stub(InstanceofStub::kArgsInRegisters); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); __ cmp(r0, Operand::Zero()); __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne); @@ -2783,7 +2828,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, flags | InstanceofStub::kCallSiteInlineCheck); flags = static_cast<InstanceofStub::Flags>( flags | InstanceofStub::kReturnTrueFalseObject); - InstanceofStub stub(flags); + InstanceofStub stub(isolate(), flags); PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); LoadContextFromDeferred(instr->context()); @@ -2805,7 +2850,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta)); __ nop(); } - CallCodeGeneric(stub.GetCode(isolate()), + CallCodeGeneric(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); @@ -3309,7 +3354,8 @@ MemOperand LCodeGen::PrepareKeyedOperand(Register key, __ add(scratch0(), scratch0(), Operand(key, LSL, shift_size)); } else { ASSERT_EQ(-1, shift_size); - __ add(scratch0(), scratch0(), Operand(key, LSR, 1)); + // key can be negative, so using ASR here. + __ add(scratch0(), scratch0(), Operand(key, ASR, 1)); } return MemOperand(base, scratch0()); } @@ -3801,7 +3847,7 @@ void LCodeGen::DoPower(LPower* instr) { ASSERT(ToDoubleRegister(instr->result()).is(d2)); if (exponent_type.IsSmi()) { - MathPowStub stub(MathPowStub::TAGGED); + MathPowStub stub(isolate(), MathPowStub::TAGGED); __ CallStub(&stub); } else if (exponent_type.IsTagged()) { Label no_deopt; @@ -3811,14 +3857,14 @@ void LCodeGen::DoPower(LPower* instr) { __ cmp(r6, Operand(ip)); DeoptimizeIf(ne, instr->environment()); __ bind(&no_deopt); - MathPowStub stub(MathPowStub::TAGGED); + MathPowStub stub(isolate(), MathPowStub::TAGGED); __ CallStub(&stub); } else if (exponent_type.IsInteger32()) { - MathPowStub stub(MathPowStub::INTEGER); + MathPowStub stub(isolate(), MathPowStub::INTEGER); __ CallStub(&stub); } else { ASSERT(exponent_type.IsDouble()); - MathPowStub stub(MathPowStub::DOUBLE); + MathPowStub stub(isolate(), MathPowStub::DOUBLE); __ CallStub(&stub); } } @@ -3925,8 +3971,8 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) { ASSERT(ToRegister(instr->result()).is(r0)); int arity = instr->arity(); - CallFunctionStub stub(arity, instr->hydrogen()->function_flags()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -3938,8 +3984,8 @@ void LCodeGen::DoCallNew(LCallNew* instr) { __ mov(r0, Operand(instr->arity())); // No cell in r2 for construct type feedback in optimized code __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); - CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } @@ -3957,8 +4003,8 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { : DONT_OVERRIDE; if (instr->arity() == 0) { - ArrayNoArgumentConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArrayNoArgumentConstructorStub stub(isolate(), 
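The one-character PrepareKeyedOperand fix above (LSR to ASR) matters because a smi key can be negative. A tiny example of what a logical shift would do to such a key:

// Sketch only: untagging a negative smi key. Assumes >> on a negative int32
// is an arithmetic shift, as on the ARM target.
#include <cassert>
#include <cstdint>

int main() {
  const std::int32_t key = -3;
  const std::int32_t tagged = key * 2;  // smi encoding (value * 2)
  const std::int32_t asr = tagged >> 1;
  const std::int32_t lsr =
      static_cast<std::int32_t>(static_cast<std::uint32_t>(tagged) >> 1);
  assert(asr == -3);          // correct key
  assert(lsr == 0x7FFFFFFD);  // bogus huge offset if LSR were used
  return 0;
}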
kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } else if (instr->arity() == 1) { Label done; if (IsFastPackedElementsKind(kind)) { @@ -3970,18 +4016,20 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { __ b(eq, &packed_case); ElementsKind holey_kind = GetHoleyElementsKind(kind); - ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArraySingleArgumentConstructorStub stub(isolate(), + holey_kind, + override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); __ jmp(&done); __ bind(&packed_case); } - ArraySingleArgumentConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); __ bind(&done); } else { - ArrayNArgumentsConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } } @@ -4028,7 +4076,6 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { return; } - Handle<Map> transition = instr->transition(); SmiCheck check_needed = instr->hydrogen()->value()->IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; @@ -4042,19 +4089,21 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { __ SmiTst(value); DeoptimizeIf(eq, instr->environment()); - // We know that value is a smi now, so we can omit the check below. + // We know now that value is not a smi, so we can omit the check below. check_needed = OMIT_SMI_CHECK; } } else if (representation.IsDouble()) { - ASSERT(transition.is_null()); ASSERT(access.IsInobject()); + ASSERT(!instr->hydrogen()->has_transition()); ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); DwVfpRegister value = ToDoubleRegister(instr->value()); __ vstr(value, FieldMemOperand(object, offset)); return; } - if (!transition.is_null()) { + if (instr->hydrogen()->has_transition()) { + Handle<Map> transition = instr->hydrogen()->transition_map(); + AddDeprecationDependency(transition); __ mov(scratch, Operand(transition)); __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); if (instr->hydrogen()->NeedsWriteBarrierForMap()) { @@ -4119,38 +4168,29 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { } -void LCodeGen::ApplyCheckIf(Condition condition, LBoundsCheck* check) { - if (FLAG_debug_code && check->hydrogen()->skip_check()) { +void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { + Condition cc = instr->hydrogen()->allow_equality() ? 
hi : hs; + if (instr->index()->IsConstantOperand()) { + Operand index = ToOperand(instr->index()); + Register length = ToRegister(instr->length()); + __ cmp(length, index); + cc = ReverseCondition(cc); + } else { + Register index = ToRegister(instr->index()); + Operand length = ToOperand(instr->length()); + __ cmp(index, length); + } + if (FLAG_debug_code && instr->hydrogen()->skip_check()) { Label done; - __ b(NegateCondition(condition), &done); + __ b(NegateCondition(cc), &done); __ stop("eliminated bounds check failed"); __ bind(&done); } else { - DeoptimizeIf(condition, check->environment()); + DeoptimizeIf(cc, instr->environment()); } } -void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { - if (instr->hydrogen()->skip_check()) return; - - if (instr->index()->IsConstantOperand()) { - int constant_index = - ToInteger32(LConstantOperand::cast(instr->index())); - if (instr->hydrogen()->length()->representation().IsSmi()) { - __ mov(ip, Operand(Smi::FromInt(constant_index))); - } else { - __ mov(ip, Operand(constant_index)); - } - __ cmp(ip, ToRegister(instr->length())); - } else { - __ cmp(ToRegister(instr->index()), ToRegister(instr->length())); - } - Condition condition = instr->hydrogen()->allow_equality() ? hi : hs; - ApplyCheckIf(condition, instr); -} - - void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { Register external_pointer = ToRegister(instr->elements()); Register key = no_reg; @@ -4381,15 +4421,15 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { scratch, GetLinkRegisterState(), kDontSaveFPRegs); } else { ASSERT(ToRegister(instr->context()).is(cp)); + ASSERT(object_reg.is(r0)); PushSafepointRegistersScope scope( this, Safepoint::kWithRegistersAndDoubles); - __ Move(r0, object_reg); __ Move(r1, to_map); bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; - TransitionElementsKindStub stub(from_kind, to_kind, is_js_array); + TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); __ CallStub(&stub); RecordSafepointWithRegistersAndDoubles( - instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); + instr->pointer_map(), 0, Safepoint::kLazyDeopt); } __ bind(¬_applicable); } @@ -4409,9 +4449,10 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) { ASSERT(ToRegister(instr->context()).is(cp)); ASSERT(ToRegister(instr->left()).is(r1)); ASSERT(ToRegister(instr->right()).is(r0)); - StringAddStub stub(instr->hydrogen()->flags(), + StringAddStub stub(isolate(), + instr->hydrogen()->flags(), instr->hydrogen()->pretenure_flag()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -5106,7 +5147,14 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) { Register object_; }; - if (instr->hydrogen()->CanOmitMapChecks()) return; + if (instr->hydrogen()->IsStabilityCheck()) { + const UniqueSet<Map>* maps = instr->hydrogen()->maps(); + for (int i = 0; i < maps->size(); ++i) { + AddStabilityDependency(maps->at(i).handle()); + } + return; + } + Register map_reg = scratch0(); LOperand* input = instr->value(); @@ -5116,22 +5164,22 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) { __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); DeferredCheckMaps* deferred = NULL; - if (instr->hydrogen()->has_migration_target()) { + if (instr->hydrogen()->HasMigrationTarget()) { deferred = new(zone()) DeferredCheckMaps(this, instr, reg); __ bind(deferred->check_maps()); } - UniqueSet<Map> map_set = instr->hydrogen()->map_set(); + const UniqueSet<Map>* maps = 
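The inlined bounds check now picks its condition up front: hi when equality is allowed, hs otherwise, reversing the operands and the condition when the index is a constant. The check itself, restated as a standalone predicate over unsigned comparisons; comparing as unsigned also rejects negative indexes in the same instruction:

// Sketch only: when DoBoundsCheck deoptimizes (or hits the debug stop).
#include <cassert>
#include <cstdint>

bool BoundsCheckFails(std::int32_t index, std::int32_t length,
                      bool allow_equality) {
  const std::uint32_t u_index = static_cast<std::uint32_t>(index);
  const std::uint32_t u_length = static_cast<std::uint32_t>(length);
  return allow_equality ? (u_index > u_length)    // "hi" -> deopt
                        : (u_index >= u_length);  // "hs" -> deopt
}

int main() {
  assert(!BoundsCheckFails(3, 10, false));
  assert(BoundsCheckFails(10, 10, false));  // index == length is out of range
  assert(!BoundsCheckFails(10, 10, true));  // unless equality is allowed
  assert(BoundsCheckFails(-1, 10, false));  // negative wraps to a huge value
  return 0;
}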
instr->hydrogen()->maps(); Label success; - for (int i = 0; i < map_set.size() - 1; i++) { - Handle<Map> map = map_set.at(i).handle(); + for (int i = 0; i < maps->size() - 1; i++) { + Handle<Map> map = maps->at(i).handle(); __ CompareMap(map_reg, map, &success); __ b(eq, &success); } - Handle<Map> map = map_set.at(map_set.size() - 1).handle(); + Handle<Map> map = maps->at(maps->size() - 1).handle(); __ CompareMap(map_reg, map, &success); - if (instr->hydrogen()->has_migration_target()) { + if (instr->hydrogen()->HasMigrationTarget()) { __ b(ne, deferred->entry()); } else { DeoptimizeIf(ne, instr->environment()); @@ -5301,7 +5349,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) { __ push(size); } else { int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); - __ Push(Smi::FromInt(size)); + if (size >= 0 && size <= Smi::kMaxValue) { + __ Push(Smi::FromInt(size)); + } else { + // We should never get here at runtime => abort + __ stop("invalid allocation size"); + return; + } } int flags = AllocateDoubleAlignFlag::encode( @@ -5381,10 +5435,11 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { // space for nested functions that don't need literals cloning. bool pretenure = instr->hydrogen()->pretenure(); if (!pretenure && instr->hydrogen()->has_no_literals()) { - FastNewClosureStub stub(instr->hydrogen()->strict_mode(), + FastNewClosureStub stub(isolate(), + instr->hydrogen()->strict_mode(), instr->hydrogen()->is_generator()); __ mov(r2, Operand(instr->hydrogen()->shared_info())); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } else { __ mov(r2, Operand(instr->hydrogen()->shared_info())); __ mov(r1, Operand(pretenure ? factory()->true_value() @@ -5421,13 +5476,14 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, Handle<String> type_name) { Condition final_branch_condition = kNoCondition; Register scratch = scratch0(); - if (type_name->Equals(heap()->number_string())) { + Factory* factory = isolate()->factory(); + if (String::Equals(type_name, factory->number_string())) { __ JumpIfSmi(input, true_label); __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); __ CompareRoot(scratch, Heap::kHeapNumberMapRootIndex); final_branch_condition = eq; - } else if (type_name->Equals(heap()->string_string())) { + } else if (String::Equals(type_name, factory->string_string())) { __ JumpIfSmi(input, false_label); __ CompareObjectType(input, scratch, no_reg, FIRST_NONSTRING_TYPE); __ b(ge, false_label); @@ -5435,22 +5491,23 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, __ tst(scratch, Operand(1 << Map::kIsUndetectable)); final_branch_condition = eq; - } else if (type_name->Equals(heap()->symbol_string())) { + } else if (String::Equals(type_name, factory->symbol_string())) { __ JumpIfSmi(input, false_label); __ CompareObjectType(input, scratch, no_reg, SYMBOL_TYPE); final_branch_condition = eq; - } else if (type_name->Equals(heap()->boolean_string())) { + } else if (String::Equals(type_name, factory->boolean_string())) { __ CompareRoot(input, Heap::kTrueValueRootIndex); __ b(eq, true_label); __ CompareRoot(input, Heap::kFalseValueRootIndex); final_branch_condition = eq; - } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) { + } else if (FLAG_harmony_typeof && + String::Equals(type_name, factory->null_string())) { __ CompareRoot(input, Heap::kNullValueRootIndex); final_branch_condition = eq; - } else if (type_name->Equals(heap()->undefined_string())) { 
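DoDeferredAllocate now refuses to smi-encode a constant allocation size that is negative or too large, stopping instead of pushing a garbage value. A sketch of that guard; the 31-bit smi limit below is an assumption about the 32-bit ARM configuration, the real constant is Smi::kMaxValue:

// Sketch only: the size guard added in DoDeferredAllocate.
#include <cassert>
#include <cstdint>

constexpr std::int32_t kSmiMaxValue = (1 << 30) - 1;  // assumed 31-bit smis

bool IsValidAllocationSize(std::int32_t size) {
  return size >= 0 && size <= kSmiMaxValue;
}

int main() {
  assert(IsValidAllocationSize(64));
  assert(!IsValidAllocationSize(-8));
  assert(!IsValidAllocationSize(kSmiMaxValue + 1));
  return 0;
}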
+ } else if (String::Equals(type_name, factory->undefined_string())) { __ CompareRoot(input, Heap::kUndefinedValueRootIndex); __ b(eq, true_label); __ JumpIfSmi(input, false_label); @@ -5460,7 +5517,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, __ tst(scratch, Operand(1 << Map::kIsUndetectable)); final_branch_condition = ne; - } else if (type_name->Equals(heap()->function_string())) { + } else if (String::Equals(type_name, factory->function_string())) { STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); Register type_reg = scratch; __ JumpIfSmi(input, false_label); @@ -5469,7 +5526,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, __ cmp(type_reg, Operand(JS_FUNCTION_PROXY_TYPE)); final_branch_condition = eq; - } else if (type_name->Equals(heap()->object_string())) { + } else if (String::Equals(type_name, factory->object_string())) { Register map = scratch; __ JumpIfSmi(input, false_label); if (!FLAG_harmony_typeof) { @@ -5607,12 +5664,12 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) { __ LoadRoot(ip, Heap::kStackLimitRootIndex); __ cmp(sp, Operand(ip)); __ b(hs, &done); - PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); + Handle<Code> stack_check = isolate()->builtins()->StackCheck(); + PredictableCodeSizeScope predictable(masm(), + CallCodeSize(stack_check, RelocInfo::CODE_TARGET)); ASSERT(instr->context()->IsRegister()); ASSERT(ToRegister(instr->context()).is(cp)); - CallCode(isolate()->builtins()->StackCheck(), - RelocInfo::CODE_TARGET, - instr); + CallCode(stack_check, RelocInfo::CODE_TARGET, instr); __ bind(&done); } else { ASSERT(instr->hydrogen()->is_backwards_branch()); @@ -5716,13 +5773,61 @@ void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { } +void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, + Register result, + Register object, + Register index) { + PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); + __ Push(object); + __ Push(index); + __ mov(cp, Operand::Zero()); + __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); + RecordSafepointWithRegisters( + instr->pointer_map(), 2, Safepoint::kNoLazyDeopt); + __ StoreToSafepointRegisterSlot(r0, result); +} + + void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { + class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode { + public: + DeferredLoadMutableDouble(LCodeGen* codegen, + LLoadFieldByIndex* instr, + Register result, + Register object, + Register index) + : LDeferredCode(codegen), + instr_(instr), + result_(result), + object_(object), + index_(index) { + } + virtual void Generate() V8_OVERRIDE { + codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_); + } + virtual LInstruction* instr() V8_OVERRIDE { return instr_; } + private: + LLoadFieldByIndex* instr_; + Register result_; + Register object_; + Register index_; + }; + Register object = ToRegister(instr->object()); Register index = ToRegister(instr->index()); Register result = ToRegister(instr->result()); Register scratch = scratch0(); + DeferredLoadMutableDouble* deferred; + deferred = new(zone()) DeferredLoadMutableDouble( + this, instr, result, object, index); + Label out_of_object, done; + + __ tst(index, Operand(Smi::FromInt(1))); + __ b(ne, deferred->entry()); + __ mov(index, Operand(index, ASR, 1)); + __ cmp(index, Operand::Zero()); __ b(lt, &out_of_object); @@ -5738,6 +5843,7 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); __ ldr(result, 
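DoLoadFieldByIndex gains a deferred path keyed off the low bit of the index: bit 0 selects the mutable-double runtime load, the remaining bits are the field index, and a negative field index means the property lives in the out-of-object backing store. A decoding sketch; anything beyond what the emitted instructions show is an assumption:

// Sketch only: decoding the field index the way the new code does
// (tst #1, ASR #1, branch on negative).
#include <cassert>

struct DecodedFieldIndex {
  bool is_mutable_double;  // handled by the deferred runtime call
  bool is_out_of_object;   // negative index -> properties backing store
  int field_index;
};

DecodedFieldIndex DecodeFieldIndex(int encoded) {
  DecodedFieldIndex d;
  d.is_mutable_double = (encoded & 1) != 0;  // tst index, #1
  d.field_index = encoded >> 1;              // mov index, index ASR #1
  d.is_out_of_object = d.field_index < 0;    // b lt, &out_of_object
  return d;
}

int main() {
  assert(DecodeFieldIndex(6).field_index == 3);
  assert(DecodeFieldIndex(7).is_mutable_double);
  assert(DecodeFieldIndex(-4).is_out_of_object);
  return 0;
}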
FieldMemOperand(scratch, FixedArray::kHeaderSize - kPointerSize)); + __ bind(deferred->exit()); __ bind(&done); } diff --git a/deps/v8/src/arm/lithium-codegen-arm.h b/deps/v8/src/arm/lithium-codegen-arm.h index 21da500d0..3e05c328c 100644 --- a/deps/v8/src/arm/lithium-codegen-arm.h +++ b/deps/v8/src/arm/lithium-codegen-arm.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM_LITHIUM_CODEGEN_ARM_H_ #define V8_ARM_LITHIUM_CODEGEN_ARM_H_ @@ -35,7 +12,7 @@ #include "lithium-codegen.h" #include "safepoint-table.h" #include "scopes.h" -#include "v8utils.h" +#include "utils.h" namespace v8 { namespace internal { @@ -141,6 +118,10 @@ class LCodeGen: public LCodeGenBase { void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, Label* map_check); void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); + void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, + Register result, + Register object, + Register index); // Parallel move support. 
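The new DeferredLoadMutableDouble path above keys off a bit packed into the field index: judging from the emitted tst/ASR sequence, the index appears to encode (field_index << 1) | is_double and is then Smi-tagged, so bit 1 of the register flags a mutable HeapNumber field that must be loaded through Runtime::kLoadMutableDouble. A standalone sketch of that decode; the packing layout is an inference from the hunk, not stated in the patch:

#include <cstdint>
#include <cstdio>

int main() {
  int32_t field_index = 3;
  bool is_double = false;
  int32_t packed = (field_index << 1) | (is_double ? 1 : 0);
  int32_t tagged = packed << 1;            // Smi-tag: one extra shift

  bool take_deferred = (tagged & 2) != 0;  // tst(index, Operand(Smi::FromInt(1)))
  int32_t smi_index = tagged >> 1;         // mov(index, Operand(index, ASR, 1))
  std::printf("deferred=%d, remaining Smi=%d\n", take_deferred, smi_index);
  return 0;
}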
void DoParallelMove(LParallelMove* move); @@ -182,8 +163,6 @@ class LCodeGen: public LCodeGenBase { int GetStackSlotCount() const { return chunk()->spill_slot_count(); } - void Abort(BailoutReason reason); - void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } void SaveCallerDoubles(); @@ -205,6 +184,8 @@ class LCodeGen: public LCodeGenBase { RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS }; + int CallCodeSize(Handle<Code> code, RelocInfo::Mode mode); + void CallCode( Handle<Code> code, RelocInfo::Mode mode, @@ -258,7 +239,6 @@ class LCodeGen: public LCodeGenBase { LEnvironment* environment, Deoptimizer::BailoutType bailout_type); void DeoptimizeIf(Condition condition, LEnvironment* environment); - void ApplyCheckIf(Condition condition, LBoundsCheck* check); void AddToTranslation(LEnvironment* environment, Translation* translation, diff --git a/deps/v8/src/arm/lithium-gap-resolver-arm.cc b/deps/v8/src/arm/lithium-gap-resolver-arm.cc index 0c6b2adad..fe0ef144a 100644 --- a/deps/v8/src/arm/lithium-gap-resolver-arm.cc +++ b/deps/v8/src/arm/lithium-gap-resolver-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -33,11 +10,22 @@ namespace v8 { namespace internal { -static const Register kSavedValueRegister = { 9 }; +// We use the root register to spill a value while breaking a cycle in parallel +// moves. We don't need access to roots while resolving the move list and using +// the root register has two advantages: +// - It is not in crankshaft allocatable registers list, so it can't interfere +// with any of the moves we are resolving. +// - We don't need to push it on the stack, as we can reload it with its value +// once we have resolved a cycle. 
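The comment block above explains why the gap resolver can borrow the root register: it is outside Crankshaft's allocatable set and its contents are a constant that InitializeRootRegister() can recreate afterwards. The underlying problem is the classic parallel-move cycle, sketched below in plain C++ with an ordinary temporary standing in for the root register (illustrative only):

#include <cstdio>

int main() {
  // The parallel move {r0 -> r1, r1 -> r0} cannot be serialized without a temp.
  int r0 = 10, r1 = 20;
  int temp = r0;   // BreakCycle: spill the source of the cycle root
  r0 = r1;         // emit the remaining move(s)
  r1 = temp;       // RestoreValue: write the spilled value to its destination
  std::printf("r0=%d r1=%d\n", r0, r1);   // prints r0=20 r1=10
  return 0;
}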
+#define kSavedValueRegister kRootRegister + LGapResolver::LGapResolver(LCodeGen* owner) : cgen_(owner), moves_(32, owner->zone()), root_index_(0), in_cycle_(false), - saved_destination_(NULL) { } + saved_destination_(NULL), need_to_restore_root_(false) { } + + +#define __ ACCESS_MASM(cgen_->masm()) void LGapResolver::Resolve(LParallelMove* parallel_move) { @@ -67,6 +55,12 @@ void LGapResolver::Resolve(LParallelMove* parallel_move) { } } + if (need_to_restore_root_) { + ASSERT(kSavedValueRegister.is(kRootRegister)); + __ InitializeRootRegister(); + need_to_restore_root_ = false; + } + moves_.Rewind(0); } @@ -155,20 +149,21 @@ void LGapResolver::Verify() { #endif } -#define __ ACCESS_MASM(cgen_->masm()) void LGapResolver::BreakCycle(int index) { - // We save in a register the value that should end up in the source of - // moves_[root_index]. After performing all moves in the tree rooted - // in that move, we save the value to that source. + // We save in a register the source of that move and we remember its + // destination. Then we mark this move as resolved so the cycle is + // broken and we can perform the other moves. ASSERT(moves_[index].destination()->Equals(moves_[root_index_].source())); ASSERT(!in_cycle_); in_cycle_ = true; LOperand* source = moves_[index].source(); saved_destination_ = moves_[index].destination(); if (source->IsRegister()) { + need_to_restore_root_ = true; __ mov(kSavedValueRegister, cgen_->ToRegister(source)); } else if (source->IsStackSlot()) { + need_to_restore_root_ = true; __ ldr(kSavedValueRegister, cgen_->ToMemOperand(source)); } else if (source->IsDoubleRegister()) { __ vmov(kScratchDoubleReg, cgen_->ToDoubleRegister(source)); @@ -186,7 +181,6 @@ void LGapResolver::RestoreValue() { ASSERT(in_cycle_); ASSERT(saved_destination_ != NULL); - // Spilled value is in kSavedValueRegister or kSavedDoubleValueRegister. if (saved_destination_->IsRegister()) { __ mov(cgen_->ToRegister(saved_destination_), kSavedValueRegister); } else if (saved_destination_->IsStackSlot()) { @@ -226,20 +220,15 @@ void LGapResolver::EmitMove(int index) { } else { ASSERT(destination->IsStackSlot()); MemOperand destination_operand = cgen_->ToMemOperand(destination); - if (in_cycle_) { - if (!destination_operand.OffsetIsUint12Encodable()) { - // ip is overwritten while saving the value to the destination. - // Therefore we can't use ip. It is OK if the read from the source - // destroys ip, since that happens before the value is read. - __ vldr(kScratchDoubleReg.low(), source_operand); - __ vstr(kScratchDoubleReg.low(), destination_operand); - } else { - __ ldr(ip, source_operand); - __ str(ip, destination_operand); - } + if (!destination_operand.OffsetIsUint12Encodable()) { + // ip is overwritten while saving the value to the destination. + // Therefore we can't use ip. It is OK if the read from the source + // destroys ip, since that happens before the value is read. + __ vldr(kScratchDoubleReg.low(), source_operand); + __ vstr(kScratchDoubleReg.low(), destination_operand); } else { - __ ldr(kSavedValueRegister, source_operand); - __ str(kSavedValueRegister, destination_operand); + __ ldr(ip, source_operand); + __ str(ip, destination_operand); } } @@ -261,14 +250,14 @@ void LGapResolver::EmitMove(int index) { } else { ASSERT(destination->IsStackSlot()); ASSERT(!in_cycle_); // Constant moves happen after all cycles are gone. + need_to_restore_root_ = true; Representation r = cgen_->IsSmi(constant_source) ? 
Representation::Smi() : Representation::Integer32(); if (cgen_->IsInteger32(constant_source)) { __ mov(kSavedValueRegister, Operand(cgen_->ToRepresentation(constant_source, r))); } else { - __ Move(kSavedValueRegister, - cgen_->ToHandle(constant_source)); + __ Move(kSavedValueRegister, cgen_->ToHandle(constant_source)); } __ str(kSavedValueRegister, cgen_->ToMemOperand(destination)); } @@ -290,16 +279,11 @@ void LGapResolver::EmitMove(int index) { ASSERT(destination->IsDoubleStackSlot()); MemOperand destination_operand = cgen_->ToMemOperand(destination); if (in_cycle_) { - // kSavedDoubleValueRegister was used to break the cycle, - // but kSavedValueRegister is free. - MemOperand source_high_operand = - cgen_->ToHighMemOperand(source); - MemOperand destination_high_operand = - cgen_->ToHighMemOperand(destination); - __ ldr(kSavedValueRegister, source_operand); - __ str(kSavedValueRegister, destination_operand); - __ ldr(kSavedValueRegister, source_high_operand); - __ str(kSavedValueRegister, destination_high_operand); + // kScratchDoubleReg was used to break the cycle. + __ vstm(db_w, sp, kScratchDoubleReg, kScratchDoubleReg); + __ vldr(kScratchDoubleReg, source_operand); + __ vstr(kScratchDoubleReg, destination_operand); + __ vldm(ia_w, sp, kScratchDoubleReg, kScratchDoubleReg); } else { __ vldr(kScratchDoubleReg, source_operand); __ vstr(kScratchDoubleReg, destination_operand); diff --git a/deps/v8/src/arm/lithium-gap-resolver-arm.h b/deps/v8/src/arm/lithium-gap-resolver-arm.h index 044c2864a..73914e4da 100644 --- a/deps/v8/src/arm/lithium-gap-resolver-arm.h +++ b/deps/v8/src/arm/lithium-gap-resolver-arm.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
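In the in-cycle double-slot move above, kScratchDoubleReg already holds the spilled cycle value, so the new code parks it on the stack with vstm/vldm, reuses it for the memory-to-memory copy, and then restores it, replacing the old word-by-word copy through the saved-value register. A plain-C++ sketch of that save-around-use pattern (illustrative only):

#include <cstdio>

int main() {
  double scratch = 3.5;                  // holds the spilled cycle value
  double source_slot = 2.25, destination_slot = 0.0;

  double saved = scratch;                // vstm db_w, sp, {kScratchDoubleReg}
  scratch = source_slot;                 // vldr kScratchDoubleReg, source
  destination_slot = scratch;            // vstr kScratchDoubleReg, destination
  scratch = saved;                       // vldm ia_w, sp, {kScratchDoubleReg}

  std::printf("dest=%.2f scratch=%.2f\n", destination_slot, scratch);
  return 0;
}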
#ifndef V8_ARM_LITHIUM_GAP_RESOLVER_ARM_H_ #define V8_ARM_LITHIUM_GAP_RESOLVER_ARM_H_ @@ -76,6 +53,10 @@ class LGapResolver V8_FINAL BASE_EMBEDDED { int root_index_; bool in_cycle_; LOperand* saved_destination_; + + // We use the root register as a scratch in a few places. When that happens, + // this flag is set to indicate that it needs to be restored. + bool need_to_restore_root_; }; } } // namespace v8::internal diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc index 2bfe09f76..975262244 100644 --- a/deps/v8/src/arm/macro-assembler-arm.cc +++ b/deps/v8/src/arm/macro-assembler-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <limits.h> // For LONG_MIN, LONG_MAX. 
@@ -100,19 +77,31 @@ int MacroAssembler::CallSize( int size = 2 * kInstrSize; Instr mov_instr = cond | MOV | LeaveCC; intptr_t immediate = reinterpret_cast<intptr_t>(target); - if (!Operand(immediate, rmode).is_single_instruction(this, mov_instr)) { + if (!Operand(immediate, rmode).is_single_instruction(isolate(), + this, + mov_instr)) { size += kInstrSize; } return size; } -int MacroAssembler::CallSizeNotPredictableCodeSize( - Address target, RelocInfo::Mode rmode, Condition cond) { +int MacroAssembler::CallStubSize( + CodeStub* stub, TypeFeedbackId ast_id, Condition cond) { + return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond); +} + + +int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate, + Address target, + RelocInfo::Mode rmode, + Condition cond) { int size = 2 * kInstrSize; Instr mov_instr = cond | MOV | LeaveCC; intptr_t immediate = reinterpret_cast<intptr_t>(target); - if (!Operand(immediate, rmode).is_single_instruction(NULL, mov_instr)) { + if (!Operand(immediate, rmode).is_single_instruction(isolate, + NULL, + mov_instr)) { size += kInstrSize; } return size; @@ -272,11 +261,11 @@ void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) { void MacroAssembler::And(Register dst, Register src1, const Operand& src2, Condition cond) { if (!src2.is_reg() && - !src2.must_output_reloc_info(this) && + !src2.must_output_reloc_info(isolate(), this) && src2.immediate() == 0) { mov(dst, Operand::Zero(), LeaveCC, cond); - } else if (!src2.is_single_instruction(this) && - !src2.must_output_reloc_info(this) && + } else if (!src2.is_single_instruction(isolate(), this) && + !src2.must_output_reloc_info(isolate(), this) && CpuFeatures::IsSupported(ARMv7) && IsPowerOf2(src2.immediate() + 1)) { ubfx(dst, src1, 0, @@ -549,7 +538,8 @@ void MacroAssembler::RecordWrite(Register object, if (lr_status == kLRHasNotBeenSaved) { push(lr); } - RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode); + RecordWriteStub stub(isolate(), object, value, address, remembered_set_action, + fp_mode); CallStub(&stub); if (lr_status == kLRHasNotBeenSaved) { pop(lr); @@ -598,7 +588,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests. } push(lr); StoreBufferOverflowStub store_buffer_overflow = - StoreBufferOverflowStub(fp_mode); + StoreBufferOverflowStub(isolate(), fp_mode); CallStub(&store_buffer_overflow); pop(lr); bind(&done); @@ -650,7 +640,7 @@ void MacroAssembler::PopSafepointRegisters() { void MacroAssembler::PushSafepointRegistersAndDoubles() { // Number of d-regs not known at snapshot time. - ASSERT(!Serializer::enabled()); + ASSERT(!Serializer::enabled(isolate())); PushSafepointRegisters(); // Only save allocatable registers. ASSERT(kScratchDoubleReg.is(d15) && kDoubleRegZero.is(d14)); @@ -664,7 +654,7 @@ void MacroAssembler::PushSafepointRegistersAndDoubles() { void MacroAssembler::PopSafepointRegistersAndDoubles() { // Number of d-regs not known at snapshot time. - ASSERT(!Serializer::enabled()); + ASSERT(!Serializer::enabled(isolate())); // Only save allocatable registers. ASSERT(kScratchDoubleReg.is(d15) && kDoubleRegZero.is(d14)); ASSERT(DwVfpRegister::NumReservedRegisters() == 2); @@ -706,7 +696,7 @@ MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) { // Number of d-regs not known at snapshot time. - ASSERT(!Serializer::enabled()); + ASSERT(!Serializer::enabled(isolate())); // General purpose registers are pushed last on the stack. 
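Besides the new CallStubSize helper, the CallSize hunks above mostly thread an Isolate through to is_single_instruction; the size computation itself is unchanged: two instructions for the mov/blx pair, plus one more if the target immediate cannot be materialized in a single mov. A standalone sketch of the ARM modified-immediate test that decision rests on; this is a simplification, since the real predicate also accounts for movw/movt and constant-pool loads:

#include <cstdint>
#include <cstdio>

// True if imm is an 8-bit value rotated right by an even amount.
static bool FitsSingleMov(uint32_t imm) {
  for (int rot = 0; rot < 32; rot += 2) {
    uint32_t v = rot == 0 ? imm : (imm << rot) | (imm >> (32 - rot));  // rotate left
    if (v <= 0xff) return true;
  }
  return false;
}

int main() {
  const int kInstrSize = 4;
  uint32_t targets[] = { 0x3f000000u, 0x76f2a04cu };   // hypothetical call targets
  for (uint32_t target : targets) {
    int size = 2 * kInstrSize + (FitsSingleMov(target) ? 0 : kInstrSize);
    std::printf("target %#x -> call size %d bytes\n",
                static_cast<unsigned>(target), size);   // 8 bytes, then 12 bytes
  }
  return 0;
}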
int doubles_size = DwVfpRegister::NumAllocatableRegisters() * kDoubleSize; int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; @@ -789,6 +779,14 @@ void MacroAssembler::VFPEnsureFPSCRState(Register scratch) { // If needed, restore wanted bits of FPSCR. Label fpscr_done; vmrs(scratch); + if (emit_debug_code()) { + Label rounding_mode_correct; + tst(scratch, Operand(kVFPRoundingModeMask)); + b(eq, &rounding_mode_correct); + // Don't call Assert here, since Runtime_Abort could re-enter here. + stop("Default rounding mode not set"); + bind(&rounding_mode_correct); + } tst(scratch, Operand(kVFPDefaultNaNModeControlBit)); b(ne, &fpscr_done); orr(scratch, scratch, Operand(kVFPDefaultNaNModeControlBit)); @@ -912,7 +910,7 @@ void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); } else { PredictableCodeSizeScope predictible_code_size_scope( - this, kNoCodeAgeSequenceLength * Assembler::kInstrSize); + this, kNoCodeAgeSequenceLength); // The following three instructions must remain together and unmodified // for code aging to work properly. if (isolate()->IsCodePreAgingActive()) { @@ -989,7 +987,6 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { } if (FLAG_enable_ool_constant_pool) { str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset)); - LoadConstantPoolPointerRegister(); } mov(ip, Operand(CodeObject())); str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset)); @@ -1321,15 +1318,13 @@ void MacroAssembler::IsObjectNameType(Register object, } -#ifdef ENABLE_DEBUGGER_SUPPORT void MacroAssembler::DebugBreak() { mov(r0, Operand::Zero()); mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate()))); - CEntryStub ces(1); + CEntryStub ces(isolate(), 1); ASSERT(AllowThisStubCall(&ces)); - Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK); + Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); } -#endif void MacroAssembler::PushTryHandler(StackHandler::Kind kind, @@ -1755,7 +1750,7 @@ void MacroAssembler::Allocate(int object_size, object_size -= bits; shift += 8; Operand bits_operand(bits); - ASSERT(bits_operand.is_single_instruction(this)); + ASSERT(bits_operand.is_single_instruction(isolate(), this)); add(scratch2, source, bits_operand, SetCC, cond); source = scratch2; cond = cc; @@ -2305,12 +2300,12 @@ void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id, Condition cond) { ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs. - Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id, cond); + Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond); } void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { - Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, cond); + Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); } @@ -2339,10 +2334,7 @@ void MacroAssembler::CallApiFunctionAndReturn( Label profiler_disabled; Label end_profiler_check; - bool* is_profiling_flag = - isolate()->cpu_profiler()->is_profiling_address(); - STATIC_ASSERT(sizeof(*is_profiling_flag) == 1); - mov(r9, Operand(reinterpret_cast<int32_t>(is_profiling_flag))); + mov(r9, Operand(ExternalReference::is_profiling_address(isolate()))); ldrb(r9, MemOperand(r9, 0)); cmp(r9, Operand(0)); b(eq, &profiler_disabled); @@ -2375,7 +2367,7 @@ void MacroAssembler::CallApiFunctionAndReturn( // Native call returns to the DirectCEntry stub which redirects to the // return address pushed on stack (could have moved after GC). 
// DirectCEntry stub itself is generated early and never moves. - DirectCEntryStub stub; + DirectCEntryStub stub(isolate()); stub.GenerateCall(this, r3); if (FLAG_log_timer_events) { @@ -2455,14 +2447,6 @@ bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { } -void MacroAssembler::IllegalOperation(int num_arguments) { - if (num_arguments > 0) { - add(sp, sp, Operand(num_arguments * kPointerSize)); - } - LoadRoot(r0, Heap::kUndefinedValueRootIndex); -} - - void MacroAssembler::IndexFromHash(Register hash, Register index) { // If the hash field contains an array index pick it out. The assert checks // that the constants for the maximum number of digits for an array index @@ -2580,7 +2564,7 @@ void MacroAssembler::TruncateDoubleToI(Register result, sub(sp, sp, Operand(kDoubleSize)); // Put input on stack. vstr(double_input, MemOperand(sp, 0)); - DoubleToIStub stub(sp, result, 0, true, true); + DoubleToIStub stub(isolate(), sp, result, 0, true, true); CallStub(&stub); add(sp, sp, Operand(kDoubleSize)); @@ -2602,7 +2586,8 @@ void MacroAssembler::TruncateHeapNumberToI(Register result, // If we fell through then inline version didn't succeed - call stub instead. push(lr); - DoubleToIStub stub(object, + DoubleToIStub stub(isolate(), + object, result, HeapNumber::kValueOffset - kHeapObjectTag, true, @@ -2657,10 +2642,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // If the expected number of arguments of the runtime function is // constant, we check that the actual number of arguments match the // expectation. - if (f->nargs >= 0 && f->nargs != num_arguments) { - IllegalOperation(num_arguments); - return; - } + CHECK(f->nargs < 0 || f->nargs == num_arguments); // TODO(1236192): Most runtime routines don't need the number of // arguments passed in because it is constant. At some point we @@ -2668,7 +2650,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // smarter. mov(r0, Operand(num_arguments)); mov(r1, Operand(ExternalReference(f, isolate()))); - CEntryStub stub(1, save_doubles); + CEntryStub stub(isolate(), 1, save_doubles); CallStub(&stub); } @@ -2678,7 +2660,7 @@ void MacroAssembler::CallExternalReference(const ExternalReference& ext, mov(r0, Operand(num_arguments)); mov(r1, Operand(ext)); - CEntryStub stub(1); + CEntryStub stub(isolate(), 1); CallStub(&stub); } @@ -2710,8 +2692,8 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); #endif mov(r1, Operand(builtin)); - CEntryStub stub(1); - Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET); + CEntryStub stub(isolate(), 1); + Jump(stub.GetCode(), RelocInfo::CODE_TARGET); } @@ -3794,36 +3776,19 @@ void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { void MacroAssembler::ClampDoubleToUint8(Register result_reg, DwVfpRegister input_reg, LowDwVfpRegister double_scratch) { - Label above_zero; Label done; - Label in_bounds; - - VFPCompareAndSetFlags(input_reg, 0.0); - b(gt, &above_zero); - // Double value is less than zero, NaN or Inf, return 0. - mov(result_reg, Operand::Zero()); - b(al, &done); - - // Double value is >= 255, return 255. - bind(&above_zero); + // Handle inputs >= 255 (including +infinity). Vmov(double_scratch, 255.0, result_reg); - VFPCompareAndSetFlags(input_reg, double_scratch); - b(le, &in_bounds); mov(result_reg, Operand(255)); - b(al, &done); - - // In 0-255 range, round and truncate. - bind(&in_bounds); - // Save FPSCR. 
- vmrs(ip); - // Set rounding mode to round to the nearest integer by clearing bits[23:22]. - bic(result_reg, ip, Operand(kVFPRoundingModeMask)); - vmsr(result_reg); - vcvt_s32_f64(double_scratch.low(), input_reg, kFPSCRRounding); + VFPCompareAndSetFlags(input_reg, double_scratch); + b(ge, &done); + + // For inputs < 255 (including negative) vcvt_u32_f64 with round-to-nearest + // rounding mode will provide the correct result. + vcvt_u32_f64(double_scratch.low(), input_reg, kFPSCRRounding); vmov(result_reg, double_scratch.low()); - // Restore FPSCR. - vmsr(ip); + bind(&done); } diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h index 6b6ecd32d..ba6f82571 100644 --- a/deps/v8/src/arm/macro-assembler-arm.h +++ b/deps/v8/src/arm/macro-assembler-arm.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
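The rewritten ClampDoubleToUint8 above drops the save/modify/restore dance on FPSCR: it relies on the default round-to-nearest mode (which VFPEnsureFPSCRState now asserts under --debug-code) and on vcvt_u32_f64 already flushing NaN and negative inputs to 0, so only the >= 255 case needs an explicit compare. A standalone description of the resulting mapping, my reading of the hunk expressed in portable C++:

#include <cmath>
#include <cstdint>
#include <cstdio>

static uint8_t ClampDoubleToUint8(double x) {
  if (std::isnan(x)) return 0;     // the unordered compare falls through; vcvt of NaN is 0
  if (x >= 255.0) return 255;      // includes +infinity
  if (x <= 0.0) return 0;          // negatives (and -infinity) clamp to 0
  return static_cast<uint8_t>(std::nearbyint(x));   // round to nearest, ties to even
}

int main() {
  std::printf("%u %u %u %u\n",
              ClampDoubleToUint8(-3.0), ClampDoubleToUint8(std::nan("")),
              ClampDoubleToUint8(254.5), ClampDoubleToUint8(1e9));
  // prints: 0 0 254 255  (254.5 rounds to the even neighbour)
  return 0;
}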
#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_ #define V8_ARM_MACRO_ASSEMBLER_ARM_H_ @@ -102,7 +79,11 @@ class MacroAssembler: public Assembler { static int CallSize(Register target, Condition cond = al); void Call(Register target, Condition cond = al); int CallSize(Address target, RelocInfo::Mode rmode, Condition cond = al); - static int CallSizeNotPredictableCodeSize(Address target, + int CallStubSize(CodeStub* stub, + TypeFeedbackId ast_id = TypeFeedbackId::None(), + Condition cond = al); + static int CallSizeNotPredictableCodeSize(Isolate* isolate, + Address target, RelocInfo::Mode rmode, Condition cond = al); void Call(Address target, RelocInfo::Mode rmode, @@ -627,12 +608,10 @@ class MacroAssembler: public Assembler { Register scratch, Label* fail); -#ifdef ENABLE_DEBUGGER_SUPPORT // --------------------------------------------------------------------------- // Debugger Support void DebugBreak(); -#endif // --------------------------------------------------------------------------- // Exception handling @@ -951,10 +930,6 @@ class MacroAssembler: public Assembler { } - // Generates code for reporting that an illegal operation has - // occurred. - void IllegalOperation(int num_arguments); - // Picks out an array index from the hash field. // Register use: // hash - holds the index's hash. Clobbered. @@ -1524,11 +1499,12 @@ class FrameAndConstantPoolScope { type_(type), old_has_frame_(masm->has_frame()), old_constant_pool_available_(masm->is_constant_pool_available()) { + // We only want to enable constant pool access for non-manual frame scopes + // to ensure the constant pool pointer is valid throughout the scope. + ASSERT(type_ != StackFrame::MANUAL && type_ != StackFrame::NONE); masm->set_has_frame(true); masm->set_constant_pool_available(true); - if (type_ != StackFrame::MANUAL && type_ != StackFrame::NONE) { - masm->EnterFrame(type, !old_constant_pool_available_); - } + masm->EnterFrame(type, !old_constant_pool_available_); } ~FrameAndConstantPoolScope() { diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.cc b/deps/v8/src/arm/regexp-macro-assembler-arm.cc index cbc34e10b..e511554ef 100644 --- a/deps/v8/src/arm/regexp-macro-assembler-arm.cc +++ b/deps/v8/src/arm/regexp-macro-assembler-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -1043,7 +1020,7 @@ void RegExpMacroAssemblerARM::CallCheckStackGuardState(Register scratch) { ExternalReference stack_guard_check = ExternalReference::re_check_stack_guard_state(isolate()); __ mov(ip, Operand(stack_guard_check)); - DirectCEntryStub stub; + DirectCEntryStub stub(isolate()); stub.GenerateCall(masm_, ip); // Drop the return address from the stack. @@ -1094,7 +1071,7 @@ int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address, ASSERT(*return_address <= re_code->instruction_start() + re_code->instruction_size()); - MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate); + Object* result = Execution::HandleStackGuardInterrupt(isolate); if (*code_handle != re_code) { // Return address no longer valid int delta = code_handle->address() - re_code->address(); diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.h b/deps/v8/src/arm/regexp-macro-assembler-arm.h index 8d9d515c7..4b18b274d 100644 --- a/deps/v8/src/arm/regexp-macro-assembler-arm.h +++ b/deps/v8/src/arm/regexp-macro-assembler-arm.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
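In CheckStackGuardState above, the return type changes from MaybeObject* to Object*, but the surrounding invariant is unchanged: if handling the interrupt triggered a GC that moved the regexp code object, the saved return address must be rebased by the distance the code moved. A standalone sketch of that fixup, with made-up addresses:

#include <cstdint>
#include <cstdio>

int main() {
  uintptr_t old_code_start = 0x10000, new_code_start = 0x48000;  // hypothetical
  uintptr_t return_address = 0x10040;          // points into the old copy
  intptr_t delta = static_cast<intptr_t>(new_code_start - old_code_start);
  return_address += delta;                     // now points into the moved copy
  std::printf("fixed return address: %#zx\n", static_cast<size_t>(return_address));
  return 0;
}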
#ifndef V8_ARM_REGEXP_MACRO_ASSEMBLER_ARM_H_ #define V8_ARM_REGEXP_MACRO_ASSEMBLER_ARM_H_ diff --git a/deps/v8/src/arm/simulator-arm.cc b/deps/v8/src/arm/simulator-arm.cc index 8f7c1e8bb..80b46e04d 100644 --- a/deps/v8/src/arm/simulator-arm.cc +++ b/deps/v8/src/arm/simulator-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdarg.h> #include <stdlib.h> @@ -773,8 +750,8 @@ Simulator::Simulator(Isolate* isolate) : isolate_(isolate) { z_flag_FPSCR_ = false; c_flag_FPSCR_ = false; v_flag_FPSCR_ = false; - FPSCR_rounding_mode_ = RZ; - FPSCR_default_NaN_mode_ = true; + FPSCR_rounding_mode_ = RN; + FPSCR_default_NaN_mode_ = false; inv_op_vfp_flag_ = false; div_zero_vfp_flag_ = false; @@ -2936,7 +2913,7 @@ void Simulator::DecodeTypeVFP(Instruction* instr) { } else if ((instr->Opc2Value() == 0xA) && (instr->Opc3Value() == 0x3) && (instr->Bit(8) == 1)) { // vcvt.f64.s32 Dd, Dd, #<fbits> - int fraction_bits = 32 - ((instr->Bit(5) << 4) | instr->Bits(3, 0)); + int fraction_bits = 32 - ((instr->Bits(3, 0) << 1) | instr->Bit(5)); int fixed_value = get_sinteger_from_s_register(vd * 2); double divide = 1 << fraction_bits; set_d_register_from_double(vd, fixed_value / divide); diff --git a/deps/v8/src/arm/simulator-arm.h b/deps/v8/src/arm/simulator-arm.h index 24d7fe58c..bbe87bcbe 100644 --- a/deps/v8/src/arm/simulator-arm.h +++ b/deps/v8/src/arm/simulator-arm.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Declares a Simulator for ARM instructions if we are not generating a native diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc index c595e4274..fd53b9782 100644 --- a/deps/v8/src/arm/stub-cache-arm.cc +++ b/deps/v8/src/arm/stub-cache-arm.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
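Two simulator-arm.cc fixes above are easy to miss: the initial FPSCR state now matches the architectural default (round-to-nearest, default-NaN off), and the fraction-bits field of vcvt.f64.s32 Dd, Dd, #fbits is decoded as (imm4 << 1) | i rather than with the two fields swapped. A standalone sketch of the corrected decode; field names follow the ARM ARM and the example values are made up:

#include <cstdio>

int main() {
  unsigned imm4 = 0xC;   // instr->Bits(3, 0)
  unsigned i = 1;        // instr->Bit(5)
  int fraction_bits = 32 - ((imm4 << 1) | i);   // 32 - 25 = 7
  int fixed_value = 640;                        // hypothetical S-register contents
  double divide = 1u << fraction_bits;          // 128.0
  std::printf("fbits=%d value=%.4f\n", fraction_bits, fixed_value / divide);  // 5.0
  return 0;
}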
#include "v8.h" @@ -431,6 +408,22 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ JumpIfNotSmi(value_reg, miss_label); } else if (representation.IsHeapObject()) { __ JumpIfSmi(value_reg, miss_label); + HeapType* field_type = descriptors->GetFieldType(descriptor); + HeapType::Iterator<Map> it = field_type->Classes(); + if (!it.Done()) { + __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); + Label do_store; + while (true) { + __ CompareMap(scratch1, it.Current(), &do_store); + it.Advance(); + if (it.Done()) { + __ b(ne, miss_label); + break; + } + __ b(eq, &do_store); + } + __ bind(&do_store); + } } else if (representation.IsDouble()) { Label do_store, heap_number; __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); @@ -593,6 +586,22 @@ void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm, __ JumpIfNotSmi(value_reg, miss_label); } else if (representation.IsHeapObject()) { __ JumpIfSmi(value_reg, miss_label); + HeapType* field_type = lookup->GetFieldType(); + HeapType::Iterator<Map> it = field_type->Classes(); + if (!it.Done()) { + __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); + Label do_store; + while (true) { + __ CompareMap(scratch1, it.Current(), &do_store); + it.Advance(); + if (it.Done()) { + __ b(ne, miss_label); + break; + } + __ b(eq, &do_store); + } + __ bind(&do_store); + } } else if (representation.IsDouble()) { // Load the double storage. if (index < 0) { @@ -801,7 +810,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm, __ mov(api_function_address, Operand(ref)); // Jump to stub. - CallApiFunctionStub stub(is_store, call_data_undefined, argc); + CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc); __ TailCallStub(&stub); } @@ -836,7 +845,9 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type, int depth = 0; Handle<JSObject> current = Handle<JSObject>::null(); - if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant()); + if (type->IsConstant()) { + current = Handle<JSObject>::cast(type->AsConstant()->Value()); + } Handle<JSObject> prototype = Handle<JSObject>::null(); Handle<Map> current_map = receiver_map; Handle<Map> holder_map(holder->map()); @@ -859,7 +870,7 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type, name = factory()->InternalizeString(Handle<String>::cast(name)); } ASSERT(current.is_null() || - current->property_dictionary()->FindEntry(*name) == + current->property_dictionary()->FindEntry(name) == NameDictionary::kNotFound); GenerateDictionaryNegativeLookup(masm(), miss, reg, name, @@ -999,15 +1010,17 @@ void LoadStubCompiler::GenerateLoadField(Register reg, Representation representation) { if (!reg.is(receiver())) __ mov(receiver(), reg); if (kind() == Code::LOAD_IC) { - LoadFieldStub stub(field.is_inobject(holder), + LoadFieldStub stub(isolate(), + field.is_inobject(holder), field.translate(holder), representation); - GenerateTailCall(masm(), stub.GetCode(isolate())); + GenerateTailCall(masm(), stub.GetCode()); } else { - KeyedLoadFieldStub stub(field.is_inobject(holder), + KeyedLoadFieldStub stub(isolate(), + field.is_inobject(holder), field.translate(holder), representation); - GenerateTailCall(masm(), stub.GetCode(isolate())); + GenerateTailCall(masm(), stub.GetCode()); } } @@ -1061,7 +1074,7 @@ void LoadStubCompiler::GenerateLoadCallback( ExternalReference ref = ExternalReference(&fun, type, isolate()); __ mov(getter_address_reg, Operand(ref)); - CallApiGetterStub stub; + CallApiGetterStub 
stub(isolate()); __ TailCallStub(&stub); } @@ -1154,19 +1167,6 @@ void LoadStubCompiler::GenerateLoadInterceptor( } -void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) { - Label success; - // Check that the object is a boolean. - __ LoadRoot(ip, Heap::kTrueValueRootIndex); - __ cmp(object, ip); - __ b(eq, &success); - __ LoadRoot(ip, Heap::kFalseValueRootIndex); - __ cmp(object, ip); - __ b(ne, miss); - __ bind(&success); -} - - Handle<Code> StoreStubCompiler::CompileStoreCallback( Handle<JSObject> object, Handle<JSObject> holder, diff --git a/deps/v8/src/arm64/assembler-arm64-inl.h b/deps/v8/src/arm64/assembler-arm64-inl.h index b56e3ed2a..3c17153f6 100644 --- a/deps/v8/src/arm64/assembler-arm64-inl.h +++ b/deps/v8/src/arm64/assembler-arm64-inl.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ @@ -434,6 +411,12 @@ Operand Operand::UntagSmiAndScale(Register smi, int scale) { } +MemOperand::MemOperand() + : base_(NoReg), regoffset_(NoReg), offset_(0), addrmode_(Offset), + shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) { +} + + MemOperand::MemOperand(Register base, ptrdiff_t offset, AddrMode addrmode) : base_(base), regoffset_(NoReg), offset_(offset), addrmode_(addrmode), shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) { @@ -738,7 +721,7 @@ void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) { } -static const int kCodeAgeSequenceSize = 5 * kInstructionSize; +static const int kNoCodeAgeSequenceLength = 5 * kInstructionSize; static const int kCodeAgeStubEntryOffset = 3 * kInstructionSize; @@ -750,7 +733,6 @@ Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) { Code* RelocInfo::code_age_stub() { ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE); - ASSERT(!Code::IsYoungSequence(pc_)); // Read the stub entry point from the code age sequence. 
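The GenerateStoreTransition and GenerateStoreField hunks above (stub-cache-arm.cc) start enforcing field types for heap-object fields: the stored value's map is compared against every map the field's HeapType admits, any match jumps to the store, and exhausting the list falls through to the miss label. A plain-C++ sketch of that control flow, with made-up map identities:

#include <algorithm>
#include <cstdio>
#include <vector>

int main() {
  std::vector<int> allowed_maps = {7, 11, 13};   // maps admitted by the HeapType
  int value_map = 11;                            // map of the value being stored
  bool store = std::find(allowed_maps.begin(), allowed_maps.end(), value_map)
               != allowed_maps.end();
  std::printf(store ? "do_store\n" : "miss\n");
  return 0;
}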
Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset; return Code::GetCodeFromTargetAddress(Memory::Address_at(stub_entry_address)); @@ -759,7 +741,7 @@ Code* RelocInfo::code_age_stub() { void RelocInfo::set_code_age_stub(Code* stub) { ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE); - ASSERT(!Code::IsYoungSequence(pc_)); + ASSERT(!Code::IsYoungSequence(stub->GetIsolate(), pc_)); // Overwrite the stub entry point in the code age sequence. This is loaded as // a literal so there is no need to call FlushICache here. Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset; @@ -825,14 +807,12 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) { visitor->VisitCell(this); } else if (mode == RelocInfo::EXTERNAL_REFERENCE) { visitor->VisitExternalReference(this); -#ifdef ENABLE_DEBUGGER_SUPPORT } else if (((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) || (RelocInfo::IsDebugBreakSlot(mode) && IsPatchedDebugBreakSlotSequence())) && isolate->debug()->has_break_points()) { visitor->VisitDebugTarget(this); -#endif } else if (RelocInfo::IsRuntimeEntry(mode)) { visitor->VisitRuntimeEntry(this); } @@ -850,14 +830,12 @@ void RelocInfo::Visit(Heap* heap) { StaticVisitor::VisitCell(heap, this); } else if (mode == RelocInfo::EXTERNAL_REFERENCE) { StaticVisitor::VisitExternalReference(this); -#ifdef ENABLE_DEBUGGER_SUPPORT } else if (heap->isolate()->debug()->has_break_points() && ((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) || (RelocInfo::IsDebugBreakSlot(mode) && IsPatchedDebugBreakSlotSequence()))) { StaticVisitor::VisitDebugTarget(heap, this); -#endif } else if (RelocInfo::IsRuntimeEntry(mode)) { StaticVisitor::VisitRuntimeEntry(this); } @@ -1199,11 +1177,16 @@ void Assembler::LoadRelocated(const CPURegister& rt, const Operand& operand) { } -inline void Assembler::CheckBuffer() { +inline void Assembler::CheckBufferSpace() { ASSERT(pc_ < (buffer_ + buffer_size_)); if (buffer_space() < kGap) { GrowBuffer(); } +} + + +inline void Assembler::CheckBuffer() { + CheckBufferSpace(); if (pc_offset() >= next_veneer_pool_check_) { CheckVeneerPool(false, true); } diff --git a/deps/v8/src/arm64/assembler-arm64.cc b/deps/v8/src/arm64/assembler-arm64.cc index 8bee92ccc..14f414557 100644 --- a/deps/v8/src/arm64/assembler-arm64.cc +++ b/deps/v8/src/arm64/assembler-arm64.cc @@ -271,14 +271,9 @@ void Operand::initialize_handle(Handle<Object> handle) { } -bool Operand::NeedsRelocation() const { +bool Operand::NeedsRelocation(Isolate* isolate) const { if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { -#ifdef DEBUG - if (!Serializer::enabled()) { - Serializer::TooLateToEnableNow(); - } -#endif - return Serializer::enabled(); + return Serializer::enabled(isolate); } return !RelocInfo::IsNone(rmode_); @@ -456,6 +451,8 @@ void Assembler::bind(Label* label) { ASSERT(!label->is_near_linked()); ASSERT(!label->is_bound()); + DeleteUnresolvedBranchInfoForLabel(label); + // If the label is linked, the link chain looks something like this: // // |--I----I-------I-------L @@ -497,8 +494,6 @@ void Assembler::bind(Label* label) { ASSERT(label->is_bound()); ASSERT(!label->is_linked()); - - DeleteUnresolvedBranchInfoForLabel(label); } @@ -545,21 +540,50 @@ int Assembler::LinkAndGetByteOffsetTo(Label* label) { } +void Assembler::DeleteUnresolvedBranchInfoForLabelTraverse(Label* label) { + ASSERT(label->is_linked()); + CheckLabelLinkChain(label); + + int link_offset = label->pos(); + int link_pcoffset; + bool end_of_chain = false; + + while (!end_of_chain) { + Instruction * link = 
InstructionAt(link_offset); + link_pcoffset = link->ImmPCOffset(); + + // ADR instructions are not handled by veneers. + if (link->IsImmBranch()) { + int max_reachable_pc = InstructionOffset(link) + + Instruction::ImmBranchRange(link->BranchType()); + typedef std::multimap<int, FarBranchInfo>::iterator unresolved_info_it; + std::pair<unresolved_info_it, unresolved_info_it> range; + range = unresolved_branches_.equal_range(max_reachable_pc); + unresolved_info_it it; + for (it = range.first; it != range.second; ++it) { + if (it->second.pc_offset_ == link_offset) { + unresolved_branches_.erase(it); + break; + } + } + } + + end_of_chain = (link_pcoffset == 0); + link_offset = link_offset + link_pcoffset; + } +} + + void Assembler::DeleteUnresolvedBranchInfoForLabel(Label* label) { if (unresolved_branches_.empty()) { ASSERT(next_veneer_pool_check_ == kMaxInt); return; } - // Branches to this label will be resolved when the label is bound below. - std::multimap<int, FarBranchInfo>::iterator it_tmp, it; - it = unresolved_branches_.begin(); - while (it != unresolved_branches_.end()) { - it_tmp = it++; - if (it_tmp->second.label_ == label) { - CHECK(it_tmp->first >= pc_offset()); - unresolved_branches_.erase(it_tmp); - } + if (label->is_linked()) { + // Branches to this label will be resolved when the label is bound, normally + // just after all the associated info has been deleted. + DeleteUnresolvedBranchInfoForLabelTraverse(label); } if (unresolved_branches_.empty()) { next_veneer_pool_check_ = kMaxInt; @@ -645,7 +669,7 @@ int Assembler::ConstantPoolSizeAt(Instruction* instr) { void Assembler::ConstantPoolMarker(uint32_t size) { ASSERT(is_const_pool_blocked()); // + 1 is for the crash guard. - Emit(LDR_x_lit | ImmLLiteral(2 * size + 1) | Rt(xzr)); + Emit(LDR_x_lit | ImmLLiteral(size + 1) | Rt(xzr)); } @@ -1658,6 +1682,13 @@ void Assembler::frinta(const FPRegister& fd, } +void Assembler::frintm(const FPRegister& fd, + const FPRegister& fn) { + ASSERT(fd.SizeInBits() == fn.SizeInBits()); + FPDataProcessing1Source(fd, fn, FRINTM); +} + + void Assembler::frintn(const FPRegister& fd, const FPRegister& fn) { ASSERT(fd.SizeInBits() == fn.SizeInBits()); @@ -1872,7 +1903,7 @@ void Assembler::AddSub(const Register& rd, FlagsUpdate S, AddSubOp op) { ASSERT(rd.SizeInBits() == rn.SizeInBits()); - ASSERT(!operand.NeedsRelocation()); + ASSERT(!operand.NeedsRelocation(isolate())); if (operand.IsImmediate()) { int64_t immediate = operand.immediate(); ASSERT(IsImmAddSub(immediate)); @@ -1912,7 +1943,7 @@ void Assembler::AddSubWithCarry(const Register& rd, ASSERT(rd.SizeInBits() == rn.SizeInBits()); ASSERT(rd.SizeInBits() == operand.reg().SizeInBits()); ASSERT(operand.IsShiftedRegister() && (operand.shift_amount() == 0)); - ASSERT(!operand.NeedsRelocation()); + ASSERT(!operand.NeedsRelocation(isolate())); Emit(SF(rd) | op | Flags(S) | Rm(operand.reg()) | Rn(rn) | Rd(rd)); } @@ -1933,10 +1964,7 @@ void Assembler::debug(const char* message, uint32_t code, Instr params) { #ifdef USE_SIMULATOR // Don't generate simulator specific code if we are building a snapshot, which // might be run on real hardware. - if (!Serializer::enabled()) { -#ifdef DEBUG - Serializer::TooLateToEnableNow(); -#endif + if (!Serializer::enabled(isolate())) { // The arguments to the debug marker need to be contiguous in memory, so // make sure we don't try to emit pools. 
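DeleteUnresolvedBranchInfoForLabelTraverse above walks a label's link chain and, for each branch that veneers might service, erases the matching entry from unresolved_branches_, a std::multimap keyed by the farthest pc the branch can reach. A standalone sketch of that equal_range/erase pattern:

#include <cstdio>
#include <map>

int main() {
  std::multimap<int, int> unresolved;   // max_reachable_pc -> branch pc offset
  unresolved.insert({1 << 20, 16});
  unresolved.insert({1 << 20, 64});

  int max_reachable_pc = 1 << 20, link_offset = 64;
  auto range = unresolved.equal_range(max_reachable_pc);
  for (auto it = range.first; it != range.second; ++it) {
    if (it->second == link_offset) {    // same test as it->second.pc_offset_
      unresolved.erase(it);
      break;
    }
  }
  std::printf("remaining entries: %zu\n", unresolved.size());   // 1
  return 0;
}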
BlockPoolsScope scope(this); @@ -1971,7 +1999,7 @@ void Assembler::Logical(const Register& rd, const Operand& operand, LogicalOp op) { ASSERT(rd.SizeInBits() == rn.SizeInBits()); - ASSERT(!operand.NeedsRelocation()); + ASSERT(!operand.NeedsRelocation(isolate())); if (operand.IsImmediate()) { int64_t immediate = operand.immediate(); unsigned reg_size = rd.SizeInBits(); @@ -2023,7 +2051,7 @@ void Assembler::ConditionalCompare(const Register& rn, Condition cond, ConditionalCompareOp op) { Instr ccmpop; - ASSERT(!operand.NeedsRelocation()); + ASSERT(!operand.NeedsRelocation(isolate())); if (operand.IsImmediate()) { int64_t immediate = operand.immediate(); ASSERT(IsImmConditionalCompare(immediate)); @@ -2138,7 +2166,7 @@ void Assembler::DataProcShiftedRegister(const Register& rd, Instr op) { ASSERT(operand.IsShiftedRegister()); ASSERT(rn.Is64Bits() || (rn.Is32Bits() && is_uint5(operand.shift_amount()))); - ASSERT(!operand.NeedsRelocation()); + ASSERT(!operand.NeedsRelocation(isolate())); Emit(SF(rd) | op | Flags(S) | ShiftDP(operand.shift()) | ImmDPShift(operand.shift_amount()) | Rm(operand.reg()) | Rn(rn) | Rd(rd)); @@ -2150,7 +2178,7 @@ void Assembler::DataProcExtendedRegister(const Register& rd, const Operand& operand, FlagsUpdate S, Instr op) { - ASSERT(!operand.NeedsRelocation()); + ASSERT(!operand.NeedsRelocation(isolate())); Instr dest_reg = (S == SetFlags) ? Rd(rd) : RdSP(rd); Emit(SF(rd) | op | Flags(S) | Rm(operand.reg()) | ExtendMode(operand.extend()) | ImmExtendShift(operand.shift_amount()) | @@ -2489,12 +2517,7 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { if (!RelocInfo::IsNone(rmode)) { // Don't record external references unless the heap will be serialized. if (rmode == RelocInfo::EXTERNAL_REFERENCE) { -#ifdef DEBUG - if (!Serializer::enabled()) { - Serializer::TooLateToEnableNow(); - } -#endif - if (!Serializer::enabled() && !emit_debug_code()) { + if (!Serializer::enabled(isolate()) && !emit_debug_code()) { return; } } @@ -2581,7 +2604,6 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { { // Block recursive calls to CheckConstPool and protect from veneer pools. BlockPoolsScope block_pools(this); - RecordComment("[ Constant Pool"); RecordConstPool(pool_size); // Emit jump over constant pool if necessary. @@ -2601,6 +2623,7 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { // beginning of the constant pool. // TODO(all): currently each relocated constant is 64 bits, consider adding // support for 32-bit entries. + RecordComment("[ Constant Pool"); ConstantPoolMarker(2 * num_pending_reloc_info_); ConstantPoolGuard(); @@ -2650,12 +2673,10 @@ bool Assembler::ShouldEmitVeneer(int max_reachable_pc, int margin) { void Assembler::RecordVeneerPool(int location_offset, int size) { -#ifdef ENABLE_DEBUGGER_SUPPORT RelocInfo rinfo(buffer_ + location_offset, RelocInfo::VENEER_POOL, static_cast<intptr_t>(size), NULL); reloc_info_writer.Write(&rinfo); -#endif } @@ -2789,22 +2810,91 @@ void Assembler::RecordDebugBreakSlot() { void Assembler::RecordConstPool(int size) { // We only need this for debugger support, to correctly compute offsets in the // code. -#ifdef ENABLE_DEBUGGER_SUPPORT RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size)); -#endif } -MaybeObject* Assembler::AllocateConstantPool(Heap* heap) { +Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { // No out-of-line constant pool support. 
- UNREACHABLE(); - return NULL; + ASSERT(!FLAG_enable_ool_constant_pool); + return isolate->factory()->empty_constant_pool_array(); } void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { // No out-of-line constant pool support. - UNREACHABLE(); + ASSERT(!FLAG_enable_ool_constant_pool); + return; +} + + +void PatchingAssembler::MovInt64(const Register& rd, int64_t imm) { + Label start; + bind(&start); + + ASSERT(rd.Is64Bits()); + ASSERT(!rd.IsSP()); + + for (unsigned i = 0; i < (rd.SizeInBits() / 16); i++) { + uint64_t imm16 = (imm >> (16 * i)) & 0xffffL; + movk(rd, imm16, 16 * i); + } + + ASSERT(SizeOfCodeGeneratedSince(&start) == + kMovInt64NInstrs * kInstructionSize); +} + + +void PatchingAssembler::PatchAdrFar(Instruction* target) { + // The code at the current instruction should be: + // adr rd, 0 + // nop (adr_far) + // nop (adr_far) + // nop (adr_far) + // movz scratch, 0 + // add rd, rd, scratch + + // Verify the expected code. + Instruction* expected_adr = InstructionAt(0); + CHECK(expected_adr->IsAdr() && (expected_adr->ImmPCRel() == 0)); + int rd_code = expected_adr->Rd(); + for (int i = 0; i < kAdrFarPatchableNNops; ++i) { + CHECK(InstructionAt((i + 1) * kInstructionSize)->IsNop(ADR_FAR_NOP)); + } + Instruction* expected_movz = + InstructionAt((kAdrFarPatchableNInstrs - 2) * kInstructionSize); + CHECK(expected_movz->IsMovz() && + (expected_movz->ImmMoveWide() == 0) && + (expected_movz->ShiftMoveWide() == 0)); + int scratch_code = expected_movz->Rd(); + Instruction* expected_add = + InstructionAt((kAdrFarPatchableNInstrs - 1) * kInstructionSize); + CHECK(expected_add->IsAddSubShifted() && + (expected_add->Mask(AddSubOpMask) == ADD) && + expected_add->SixtyFourBits() && + (expected_add->Rd() == rd_code) && (expected_add->Rn() == rd_code) && + (expected_add->Rm() == scratch_code) && + (static_cast<Shift>(expected_add->ShiftDP()) == LSL) && + (expected_add->ImmDPShift() == 0)); + + // Patch to load the correct address. + Label start; + bind(&start); + Register rd = Register::XRegFromCode(rd_code); + // If the target is in range, we only patch the adr. Otherwise we patch the + // nops with fixup instructions. + int target_offset = expected_adr->DistanceTo(target); + if (Instruction::IsValidPCRelOffset(target_offset)) { + adr(rd, target_offset); + for (int i = 0; i < kAdrFarPatchableNInstrs - 2; ++i) { + nop(ADR_FAR_NOP); + } + } else { + Register scratch = Register::XRegFromCode(scratch_code); + adr(rd, 0); + MovInt64(scratch, target_offset); + add(rd, rd, scratch); + } } diff --git a/deps/v8/src/arm64/assembler-arm64.h b/deps/v8/src/arm64/assembler-arm64.h index 1aae2f291..a3fbc98d9 100644 --- a/deps/v8/src/arm64/assembler-arm64.h +++ b/deps/v8/src/arm64/assembler-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_ASSEMBLER_ARM64_H_ #define V8_ARM64_ASSEMBLER_ARM64_H_ @@ -285,9 +262,9 @@ struct FPRegister : public CPURegister { static const unsigned kAllocatableLowRangeBegin = 0; static const unsigned kAllocatableLowRangeEnd = 14; static const unsigned kAllocatableHighRangeBegin = 16; - static const unsigned kAllocatableHighRangeEnd = 29; + static const unsigned kAllocatableHighRangeEnd = 28; - static const RegList kAllocatableFPRegisters = 0x3fff7fff; + static const RegList kAllocatableFPRegisters = 0x1fff7fff; // Gap between low and high ranges. static const int kAllocatableRangeGapSize = @@ -316,12 +293,12 @@ struct FPRegister : public CPURegister { ASSERT((kAllocatableLowRangeBegin == 0) && (kAllocatableLowRangeEnd == 14) && (kAllocatableHighRangeBegin == 16) && - (kAllocatableHighRangeEnd == 29)); + (kAllocatableHighRangeEnd == 28)); const char* const names[] = { "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23", - "d24", "d25", "d26", "d27", "d28", "d29" + "d24", "d25", "d26", "d27", "d28" }; return names[index]; } @@ -420,9 +397,11 @@ ALIAS_REGISTER(Register, wzr, w31); // Keeps the 0 double value. ALIAS_REGISTER(FPRegister, fp_zero, d15); // Crankshaft double scratch register. -ALIAS_REGISTER(FPRegister, crankshaft_fp_scratch, d30); -// MacroAssembler double scratch register. -ALIAS_REGISTER(FPRegister, fp_scratch, d31); +ALIAS_REGISTER(FPRegister, crankshaft_fp_scratch, d29); +// MacroAssembler double scratch registers. +ALIAS_REGISTER(FPRegister, fp_scratch, d30); +ALIAS_REGISTER(FPRegister, fp_scratch1, d30); +ALIAS_REGISTER(FPRegister, fp_scratch2, d31); #undef ALIAS_REGISTER @@ -514,8 +493,8 @@ class CPURegList { void Combine(const CPURegList& other); // Remove every register in the other CPURegList from this one. Registers that - // do not exist in this list are ignored. The type and size of the registers - // in the 'other' list must match those in this list. + // do not exist in this list are ignored. The type of the registers in the + // 'other' list must match those in this list. void Remove(const CPURegList& other); // Variants of Combine and Remove which take CPURegisters. @@ -670,7 +649,7 @@ class Operand { // Relocation information. 
RelocInfo::Mode rmode() const { return rmode_; } void set_rmode(RelocInfo::Mode rmode) { rmode_ = rmode; } - bool NeedsRelocation() const; + bool NeedsRelocation(Isolate* isolate) const; // Helpers inline static Operand UntagSmi(Register smi); @@ -690,6 +669,7 @@ class Operand { // MemOperand represents a memory operand in a load or store instruction. class MemOperand { public: + inline explicit MemOperand(); inline explicit MemOperand(Register base, ptrdiff_t offset = 0, AddrMode addrmode = Offset); @@ -1499,8 +1479,9 @@ class Assembler : public AssemblerBase { enum NopMarkerTypes { DEBUG_BREAK_NOP, INTERRUPT_CODE_NOP, + ADR_FAR_NOP, FIRST_NOP_MARKER = DEBUG_BREAK_NOP, - LAST_NOP_MARKER = INTERRUPT_CODE_NOP + LAST_NOP_MARKER = ADR_FAR_NOP }; void nop(NopMarkerTypes n) { @@ -1582,6 +1563,9 @@ class Assembler : public AssemblerBase { // FP round to integer (nearest with ties to away). void frinta(const FPRegister& fd, const FPRegister& fn); + // FP round to integer (toward minus infinity). + void frintm(const FPRegister& fd, const FPRegister& fn); + // FP round to integer (nearest with ties to even). void frintn(const FPRegister& fd, const FPRegister& fn); @@ -1688,6 +1672,10 @@ class Assembler : public AssemblerBase { return reinterpret_cast<Instruction*>(buffer_ + offset); } + ptrdiff_t InstructionOffset(Instruction* instr) const { + return reinterpret_cast<byte*>(instr) - buffer_; + } + // Register encoding. static Instr Rd(CPURegister rd) { ASSERT(rd.code() != kSPRegInternalCode); @@ -1761,6 +1749,13 @@ class Assembler : public AssemblerBase { inline static Instr ImmCondCmp(unsigned imm); inline static Instr Nzcv(StatusFlags nzcv); + static bool IsImmAddSub(int64_t immediate); + static bool IsImmLogical(uint64_t value, + unsigned width, + unsigned* n, + unsigned* imm_s, + unsigned* imm_r); + // MemOperand offset encoding. inline static Instr ImmLSUnsigned(int imm12); inline static Instr ImmLS(int imm9); @@ -1805,7 +1800,7 @@ class Assembler : public AssemblerBase { void CheckConstPool(bool force_emit, bool require_jump); // Allocate a constant pool of the correct size for the generated code. - MaybeObject* AllocateConstantPool(Heap* heap); + Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate); // Generate the constant pool for the generated code. void PopulateConstantPool(ConstantPoolArray* constant_pool); @@ -1874,11 +1869,6 @@ class Assembler : public AssemblerBase { unsigned imm_s, unsigned imm_r, LogicalOp op); - static bool IsImmLogical(uint64_t value, - unsigned width, - unsigned* n, - unsigned* imm_s, - unsigned* imm_r); void ConditionalCompare(const Register& rn, const Operand& operand, @@ -1909,7 +1899,6 @@ class Assembler : public AssemblerBase { const Operand& operand, FlagsUpdate S, AddSubOp op); - static bool IsImmAddSub(int64_t immediate); static bool IsImmFP32(float imm); static bool IsImmFP64(double imm); @@ -2034,6 +2023,7 @@ class Assembler : public AssemblerBase { } void GrowBuffer(); + void CheckBufferSpace(); void CheckBuffer(); // Pc offset of the next constant pool check. @@ -2176,6 +2166,11 @@ class Assembler : public AssemblerBase { // not later attempt (likely unsuccessfully) to patch it to branch directly to // the label. void DeleteUnresolvedBranchInfoForLabel(Label* label); + // This function deletes the information related to the label by traversing + // the label chain, and for each PC-relative instruction in the chain checking + // if pending unresolved information exists. Its complexity is proportional to + // the length of the label chain. 
+ void DeleteUnresolvedBranchInfoForLabelTraverse(Label* label); private: PositionsRecorder positions_recorder_; @@ -2218,13 +2213,21 @@ class PatchingAssembler : public Assembler { size_t length = buffer_size_ - kGap; CPU::FlushICache(buffer_, length); } + + static const int kMovInt64NInstrs = 4; + void MovInt64(const Register& rd, int64_t imm); + + // See definition of PatchAdrFar() for details. + static const int kAdrFarPatchableNNops = kMovInt64NInstrs - 1; + static const int kAdrFarPatchableNInstrs = kAdrFarPatchableNNops + 3; + void PatchAdrFar(Instruction* target); }; class EnsureSpace BASE_EMBEDDED { public: explicit EnsureSpace(Assembler* assembler) { - assembler->CheckBuffer(); + assembler->CheckBufferSpace(); } }; diff --git a/deps/v8/src/arm64/builtins-arm64.cc b/deps/v8/src/arm64/builtins-arm64.cc index 01ac4cc5d..fec5fef99 100644 --- a/deps/v8/src/arm64/builtins-arm64.cc +++ b/deps/v8/src/arm64/builtins-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -370,13 +347,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, Label rt_call, allocated; if (FLAG_inline_new) { Label undo_allocation; -#if ENABLE_DEBUGGER_SUPPORT ExternalReference debug_step_in_fp = ExternalReference::debug_step_in_fp_address(isolate); __ Mov(x2, Operand(debug_step_in_fp)); __ Ldr(x2, MemOperand(x2)); __ Cbnz(x2, &rt_call); -#endif // Load the initial map and verify that it is in fact a map. Register init_map = x2; __ Ldr(init_map, @@ -785,7 +760,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // No type feedback cell is available. 
__ LoadRoot(x2, Heap::kUndefinedValueRootIndex); - CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); + CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); __ CallStub(&stub); } else { ParameterCount actual(x0); @@ -912,7 +887,7 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { } // Jump to point after the code-age stub. - __ Add(x0, x0, kCodeAgeSequenceSize); + __ Add(x0, x0, kNoCodeAgeSequenceLength); __ Br(x0); } @@ -1280,7 +1255,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { // There is not enough stack space, so use a builtin to throw an appropriate // error. __ Push(function, argc); - __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); // We should never return from the APPLY_OVERFLOW builtin. if (__ emit_debug_code()) { __ Unreachable(); @@ -1400,6 +1375,27 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { } +static void ArgumentAdaptorStackCheck(MacroAssembler* masm, + Label* stack_overflow) { + // ----------- S t a t e ------------- + // -- x0 : actual number of arguments + // -- x1 : function (passed through to callee) + // -- x2 : expected number of arguments + // ----------------------------------- + // Check the stack for overflow. + // We are not trying to catch interruptions (e.g. debug break and + // preemption) here, so the "real stack limit" is checked. + Label enough_stack_space; + __ LoadRoot(x10, Heap::kRealStackLimitRootIndex); + // Make x10 the space we have left. The stack might already be overflowed + // here which will cause x10 to become negative. + __ Sub(x10, jssp, x10); + // Check if the arguments will overflow the stack. + __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2)); + __ B(le, stack_overflow); +} + + static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { __ SmiTag(x10, x0); __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); @@ -1433,6 +1429,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // -- x2 : expected number of arguments // ----------------------------------- + Label stack_overflow; + ArgumentAdaptorStackCheck(masm, &stack_overflow); + Register argc_actual = x0; // Excluding the receiver. Register argc_expected = x2; // Excluding the receiver. Register function = x1; @@ -1552,6 +1551,14 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // Call the entry point without adapting the arguments. __ Bind(&dont_adapt_arguments); __ Jump(code_entry); + + __ Bind(&stack_overflow); + { + FrameScope frame(masm, StackFrame::MANUAL); + EnterArgumentsAdaptorFrame(masm); + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); + __ Unreachable(); + } } diff --git a/deps/v8/src/arm64/code-stubs-arm64.cc b/deps/v8/src/arm64/code-stubs-arm64.cc index b097fc52e..a2dd22058 100644 --- a/deps/v8/src/arm64/code-stubs-arm64.cc +++ b/deps/v8/src/arm64/code-stubs-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -39,7 +16,6 @@ namespace internal { void FastNewClosureStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x2: function info static Register registers[] = { x2 }; @@ -51,7 +27,6 @@ void FastNewClosureStub::InitializeInterfaceDescriptor( void FastNewContextStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x1: function static Register registers[] = { x1 }; @@ -62,7 +37,6 @@ void FastNewContextStub::InitializeInterfaceDescriptor( void ToNumberStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x0: value static Register registers[] = { x0 }; @@ -73,7 +47,6 @@ void ToNumberStub::InitializeInterfaceDescriptor( void NumberToStringStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x0: value static Register registers[] = { x0 }; @@ -85,7 +58,6 @@ void NumberToStringStub::InitializeInterfaceDescriptor( void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x3: array literals array // x2: array literal index @@ -100,7 +72,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x3: object literals array // x2: object literal index @@ -115,7 +86,6 @@ void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( void CreateAllocationSiteStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x2: feedback vector // x3: call feedback slot @@ -127,7 +97,6 @@ void CreateAllocationSiteStub::InitializeInterfaceDescriptor( void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x1: receiver // x0: key @@ -140,7 +109,6 @@ void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x1: receiver // x0: key @@ -153,7 +121,6 @@ void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( void RegExpConstructResultStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x2: length // x1: index (of 
last match) @@ -167,7 +134,6 @@ void RegExpConstructResultStub::InitializeInterfaceDescriptor( void LoadFieldStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x0: receiver static Register registers[] = { x0 }; @@ -178,7 +144,6 @@ void LoadFieldStub::InitializeInterfaceDescriptor( void KeyedLoadFieldStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x1: receiver static Register registers[] = { x1 }; @@ -189,7 +154,6 @@ void KeyedLoadFieldStub::InitializeInterfaceDescriptor( void StringLengthStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { x0, x2 }; descriptor->register_param_count_ = 2; @@ -199,7 +163,6 @@ void StringLengthStub::InitializeInterfaceDescriptor( void KeyedStringLengthStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { x1, x0 }; descriptor->register_param_count_ = 2; @@ -209,7 +172,6 @@ void KeyedStringLengthStub::InitializeInterfaceDescriptor( void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x2: receiver // x1: key @@ -223,7 +185,6 @@ void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( void TransitionElementsKindStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x0: value (js_array) // x1: to_map @@ -237,7 +198,6 @@ void TransitionElementsKindStub::InitializeInterfaceDescriptor( void CompareNilICStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x0: value to compare static Register registers[] = { x0 }; @@ -246,12 +206,11 @@ void CompareNilICStub::InitializeInterfaceDescriptor( descriptor->deoptimization_handler_ = FUNCTION_ADDR(CompareNilIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate())); } static void InitializeArrayConstructorDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor, int constant_stack_parameter_count) { // x1: function @@ -281,28 +240,24 @@ static void InitializeArrayConstructorDescriptor( void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, 0); + InitializeArrayConstructorDescriptor(descriptor, 0); } void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, 1); + InitializeArrayConstructorDescriptor(descriptor, 1); } void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, -1); + InitializeArrayConstructorDescriptor(descriptor, -1); } static void InitializeInternalArrayConstructorDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor, int constant_stack_parameter_count) { // x1: constructor function @@ -331,28 +286,24 @@ static void InitializeInternalArrayConstructorDescriptor( void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - 
InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0); + InitializeInternalArrayConstructorDescriptor(descriptor, 0); } void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1); + InitializeInternalArrayConstructorDescriptor(descriptor, 1); } void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1); + InitializeInternalArrayConstructorDescriptor(descriptor, -1); } void ToBooleanStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x0: value static Register registers[] = { x0 }; @@ -360,12 +311,11 @@ void ToBooleanStub::InitializeInterfaceDescriptor( descriptor->register_params_ = registers; descriptor->deoptimization_handler_ = FUNCTION_ADDR(ToBooleanIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate())); } void StoreGlobalStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x1: receiver // x2: key (unused) @@ -379,7 +329,6 @@ void StoreGlobalStub::InitializeInterfaceDescriptor( void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x0: value // x3: target map @@ -394,7 +343,6 @@ void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( void BinaryOpICStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x1: left operand // x0: right operand @@ -403,12 +351,11 @@ void BinaryOpICStub::InitializeInterfaceDescriptor( descriptor->register_params_ = registers; descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate())); } void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x2: allocation site // x1: left operand @@ -422,7 +369,6 @@ void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor( void StringAddStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { // x1: left operand // x0: right operand @@ -534,10 +480,9 @@ void CallDescriptors::InitializeForIsolate(Isolate* isolate) { void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { // Update the static counter each time a new code stub is generated. - Isolate* isolate = masm->isolate(); - isolate->counters()->code_stubs()->Increment(); + isolate()->counters()->code_stubs()->Increment(); - CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); + CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(); int param_count = descriptor->register_param_count_; { // Call the runtime system in a fresh internal frame. 
@@ -1049,8 +994,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { __ JumpIfBothInstanceTypesAreNotSequentialAscii(lhs_type, rhs_type, x14, x15, &slow); - Isolate* isolate = masm->isolate(); - __ IncrementCounter(isolate->counters()->string_compare_native(), 1, x10, + __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, x10, x11); if (cond == eq) { StringCompareStub::GenerateFlatAsciiStringEquals(masm, lhs, rhs, @@ -1095,30 +1039,29 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { - // Preserve caller-saved registers x0-x7 and x10-x15. We don't care if x8, x9, - // ip0 and ip1 are corrupted by the call into C. CPURegList saved_regs = kCallerSaved; - saved_regs.Remove(ip0); - saved_regs.Remove(ip1); - saved_regs.Remove(x8); - saved_regs.Remove(x9); + CPURegList saved_fp_regs = kCallerSavedFP; // We don't allow a GC during a store buffer overflow so there is no need to // store the registers in any particular way, but we do have to store and // restore them. + + // We don't care if MacroAssembler scratch registers are corrupted. + saved_regs.Remove(*(masm->TmpList())); + saved_fp_regs.Remove(*(masm->FPTmpList())); + __ PushCPURegList(saved_regs); if (save_doubles_ == kSaveFPRegs) { - __ PushCPURegList(kCallerSavedFP); + __ PushCPURegList(saved_fp_regs); } AllowExternalCallThatCantCauseGC scope(masm); - __ Mov(x0, ExternalReference::isolate_address(masm->isolate())); + __ Mov(x0, ExternalReference::isolate_address(isolate())); __ CallCFunction( - ExternalReference::store_buffer_overflow_function(masm->isolate()), - 1, 0); + ExternalReference::store_buffer_overflow_function(isolate()), 1, 0); if (save_doubles_ == kSaveFPRegs) { - __ PopCPURegList(kCallerSavedFP); + __ PopCPURegList(saved_fp_regs); } __ PopCPURegList(saved_regs); __ Ret(); @@ -1127,10 +1070,10 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( Isolate* isolate) { - StoreBufferOverflowStub stub1(kDontSaveFPRegs); - stub1.GetCode(isolate); - StoreBufferOverflowStub stub2(kSaveFPRegs); - stub2.GetCode(isolate); + StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs); + stub1.GetCode(); + StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); + stub2.GetCode(); } @@ -1230,8 +1173,8 @@ void MathPowStub::Generate(MacroAssembler* masm) { if (exponent_type_ != INTEGER) { // Detect integer exponents stored as doubles and handle those in the // integer fast-path. - __ TryConvertDoubleToInt64(exponent_integer, exponent_double, - scratch0_double, &exponent_is_integer); + __ TryRepresentDoubleAsInt64(exponent_integer, exponent_double, + scratch0_double, &exponent_is_integer); if (exponent_type_ == ON_STACK) { FPRegister half_double = d3; @@ -1314,7 +1257,7 @@ void MathPowStub::Generate(MacroAssembler* masm) { AllowExternalCallThatCantCauseGC scope(masm); __ Mov(saved_lr, lr); __ CallCFunction( - ExternalReference::power_double_double_function(masm->isolate()), + ExternalReference::power_double_double_function(isolate()), 0, 2); __ Mov(lr, saved_lr); __ B(&done); @@ -1389,16 +1332,15 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ Bind(&call_runtime); // Put the arguments back on the stack. __ Push(base_tagged, exponent_tagged); - __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); + __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); // Return. 
__ Bind(&done); - __ AllocateHeapNumber(result_tagged, &call_runtime, scratch0, scratch1); - __ Str(result_double, - FieldMemOperand(result_tagged, HeapNumber::kValueOffset)); + __ AllocateHeapNumber(result_tagged, &call_runtime, scratch0, scratch1, + result_double); ASSERT(result_tagged.is(x0)); __ IncrementCounter( - masm->isolate()->counters()->math_pow(), 1, scratch0, scratch1); + isolate()->counters()->math_pow(), 1, scratch0, scratch1); __ Ret(); } else { AllowExternalCallThatCantCauseGC scope(masm); @@ -1406,12 +1348,12 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ Fmov(base_double, base_double_copy); __ Scvtf(exponent_double, exponent_integer); __ CallCFunction( - ExternalReference::power_double_double_function(masm->isolate()), + ExternalReference::power_double_double_function(isolate()), 0, 2); __ Mov(lr, saved_lr); __ Bind(&done); __ IncrementCounter( - masm->isolate()->counters()->math_pow(), 1, scratch0, scratch1); + isolate()->counters()->math_pow(), 1, scratch0, scratch1); __ Ret(); } } @@ -1435,18 +1377,18 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { - StoreRegistersStateStub stub1(kDontSaveFPRegs); - stub1.GetCode(isolate); - StoreRegistersStateStub stub2(kSaveFPRegs); - stub2.GetCode(isolate); + StoreRegistersStateStub stub1(isolate, kDontSaveFPRegs); + stub1.GetCode(); + StoreRegistersStateStub stub2(isolate, kSaveFPRegs); + stub2.GetCode(); } void RestoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { - RestoreRegistersStateStub stub1(kDontSaveFPRegs); - stub1.GetCode(isolate); - RestoreRegistersStateStub stub2(kSaveFPRegs); - stub2.GetCode(isolate); + RestoreRegistersStateStub stub1(isolate, kDontSaveFPRegs); + stub1.GetCode(); + RestoreRegistersStateStub stub2(isolate, kSaveFPRegs); + stub2.GetCode(); } @@ -1471,22 +1413,85 @@ bool CEntryStub::NeedsImmovableCode() { void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { - CEntryStub stub(1, kDontSaveFPRegs); - stub.GetCode(isolate); - CEntryStub stub_fp(1, kSaveFPRegs); - stub_fp.GetCode(isolate); + CEntryStub stub(isolate, 1, kDontSaveFPRegs); + stub.GetCode(); + CEntryStub stub_fp(isolate, 1, kSaveFPRegs); + stub_fp.GetCode(); } -void CEntryStub::GenerateCore(MacroAssembler* masm, - Label* throw_normal, - Label* throw_termination, - bool do_gc, - bool always_allocate) { - // x0 : Result parameter for PerformGC, if do_gc is true. +void CEntryStub::Generate(MacroAssembler* masm) { + // The Abort mechanism relies on CallRuntime, which in turn relies on + // CEntryStub, so until this stub has been generated, we have to use a + // fall-back Abort mechanism. + // + // Note that this stub must be generated before any use of Abort. + MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm); + + ASM_LOCATION("CEntryStub::Generate entry"); + ProfileEntryHookStub::MaybeCallEntryHook(masm); + + // Register parameters: + // x0: argc (including receiver, untagged) + // x1: target + // + // The stack on entry holds the arguments and the receiver, with the receiver + // at the highest address: + // + // jssp]argc-1]: receiver + // jssp[argc-2]: arg[argc-2] + // ... ... + // jssp[1]: arg[1] + // jssp[0]: arg[0] + // + // The arguments are in reverse order, so that arg[argc-2] is actually the + // first argument to the target function and arg[0] is the last. 
+ ASSERT(jssp.Is(__ StackPointer())); + const Register& argc_input = x0; + const Register& target_input = x1; + + // Calculate argv, argc and the target address, and store them in + // callee-saved registers so we can retry the call without having to reload + // these arguments. + // TODO(jbramley): If the first call attempt succeeds in the common case (as + // it should), then we might be better off putting these parameters directly + // into their argument registers, rather than using callee-saved registers and + // preserving them on the stack. + const Register& argv = x21; + const Register& argc = x22; + const Register& target = x23; + + // Derive argv from the stack pointer so that it points to the first argument + // (arg[argc-2]), or just below the receiver in case there are no arguments. + // - Adjust for the arg[] array. + Register temp_argv = x11; + __ Add(temp_argv, jssp, Operand(x0, LSL, kPointerSizeLog2)); + // - Adjust for the receiver. + __ Sub(temp_argv, temp_argv, 1 * kPointerSize); + + // Enter the exit frame. Reserve three slots to preserve x21-x23 callee-saved + // registers. + FrameScope scope(masm, StackFrame::MANUAL); + __ EnterExitFrame(save_doubles_, x10, 3); + ASSERT(csp.Is(__ StackPointer())); + + // Poke callee-saved registers into reserved space. + __ Poke(argv, 1 * kPointerSize); + __ Poke(argc, 2 * kPointerSize); + __ Poke(target, 3 * kPointerSize); + + // We normally only keep tagged values in callee-saved registers, as they + // could be pushed onto the stack by called stubs and functions, and on the + // stack they can confuse the GC. However, we're only calling C functions + // which can push arbitrary data onto the stack anyway, and so the GC won't + // examine that part of the stack. + __ Mov(argc, argc_input); + __ Mov(target, target_input); + __ Mov(argv, temp_argv); + // x21 : argv // x22 : argc - // x23 : target + // x23 : call target // // The stack (on entry) holds the arguments and the receiver, with the // receiver at the highest address: @@ -1516,44 +1521,19 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, // // After an unsuccessful call, the exit frame and suchlike are left // untouched, and the stub either throws an exception by jumping to one of - // the provided throw_ labels, or it falls through. The failure details are - // passed through in x0. - ASSERT(csp.Is(__ StackPointer())); + // the exception_returned label. - Isolate* isolate = masm->isolate(); - - const Register& argv = x21; - const Register& argc = x22; - const Register& target = x23; - - if (do_gc) { - // Call Runtime::PerformGC, passing x0 (the result parameter for - // PerformGC) and x1 (the isolate). - __ Mov(x1, ExternalReference::isolate_address(masm->isolate())); - __ CallCFunction( - ExternalReference::perform_gc_function(isolate), 2, 0); - } - - ExternalReference scope_depth = - ExternalReference::heap_always_allocate_scope_depth(isolate); - if (always_allocate) { - __ Mov(x10, Operand(scope_depth)); - __ Ldr(x11, MemOperand(x10)); - __ Add(x11, x11, 1); - __ Str(x11, MemOperand(x10)); - } + ASSERT(csp.Is(__ StackPointer())); // Prepare AAPCS64 arguments to pass to the builtin. __ Mov(x0, argc); __ Mov(x1, argv); - __ Mov(x2, ExternalReference::isolate_address(isolate)); + __ Mov(x2, ExternalReference::isolate_address(isolate())); - // Store the return address on the stack, in the space previously allocated - // by EnterExitFrame. The return address is queried by - // ExitFrame::GetStateForFramePointer. 
Label return_location; __ Adr(x12, &return_location); __ Poke(x12, 0); + if (__ emit_debug_code()) { // Verify that the slot below fp[kSPOffset]-8 points to the return location // (currently in x12). @@ -1568,27 +1548,17 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, // Call the builtin. __ Blr(target); __ Bind(&return_location); - const Register& result = x0; - - if (always_allocate) { - __ Mov(x10, Operand(scope_depth)); - __ Ldr(x11, MemOperand(x10)); - __ Sub(x11, x11, 1); - __ Str(x11, MemOperand(x10)); - } // x0 result The return code from the call. // x21 argv // x22 argc // x23 target - // - // If all of the result bits matching kFailureTagMask are '1', the result is - // a failure. Otherwise, it's an ordinary tagged object and the call was a - // success. - Label failure; - __ And(x10, result, kFailureTagMask); - __ Cmp(x10, kFailureTagMask); - __ B(&failure, eq); + const Register& result = x0; + + // Check result for exception sentinel. + Label exception_returned; + __ CompareRoot(result, Heap::kExceptionRootIndex); + __ B(eq, &exception_returned); // The call succeeded, so unwind the stack and return. @@ -1607,37 +1577,26 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, // jssp[8]: Preserved x22 (used for argc). // jssp[0]: Preserved x21 (used for argv). __ Drop(x11); + __ AssertFPCRState(); __ Ret(); // The stack pointer is still csp if we aren't returning, and the frame // hasn't changed (except for the return address). __ SetStackPointer(csp); - __ Bind(&failure); - // The call failed, so check if we need to throw an exception, and fall - // through (to retry) otherwise. - - Label retry; - // x0 result The return code from the call, including the failure - // code and details. - // x21 argv - // x22 argc - // x23 target - // Refer to the Failure class for details of the bit layout. - STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); - __ Tst(result, kFailureTypeTagMask << kFailureTagSize); - __ B(eq, &retry); // RETRY_AFTER_GC + // Handling of exception. + __ Bind(&exception_returned); // Retrieve the pending exception. + ExternalReference pending_exception_address( + Isolate::kPendingExceptionAddress, isolate()); const Register& exception = result; const Register& exception_address = x11; - __ Mov(exception_address, - Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); + __ Mov(exception_address, Operand(pending_exception_address)); __ Ldr(exception, MemOperand(exception_address)); // Clear the pending exception. - __ Mov(x10, Operand(isolate->factory()->the_hole_value())); + __ Mov(x10, Operand(isolate()->factory()->the_hole_value())); __ Str(x10, MemOperand(exception_address)); // x0 exception The exception descriptor. @@ -1647,118 +1606,9 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, // Special handling of termination exceptions, which are uncatchable by // JavaScript code. - __ Cmp(exception, Operand(isolate->factory()->termination_exception())); - __ B(eq, throw_termination); - - // Handle normal exception. - __ B(throw_normal); - - __ Bind(&retry); - // The result (x0) is passed through as the next PerformGC parameter. -} - - -void CEntryStub::Generate(MacroAssembler* masm) { - // The Abort mechanism relies on CallRuntime, which in turn relies on - // CEntryStub, so until this stub has been generated, we have to use a - // fall-back Abort mechanism. - // - // Note that this stub must be generated before any use of Abort. 
- MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm); - - ASM_LOCATION("CEntryStub::Generate entry"); - ProfileEntryHookStub::MaybeCallEntryHook(masm); - - // Register parameters: - // x0: argc (including receiver, untagged) - // x1: target - // - // The stack on entry holds the arguments and the receiver, with the receiver - // at the highest address: - // - // jssp]argc-1]: receiver - // jssp[argc-2]: arg[argc-2] - // ... ... - // jssp[1]: arg[1] - // jssp[0]: arg[0] - // - // The arguments are in reverse order, so that arg[argc-2] is actually the - // first argument to the target function and arg[0] is the last. - ASSERT(jssp.Is(__ StackPointer())); - const Register& argc_input = x0; - const Register& target_input = x1; - - // Calculate argv, argc and the target address, and store them in - // callee-saved registers so we can retry the call without having to reload - // these arguments. - // TODO(jbramley): If the first call attempt succeeds in the common case (as - // it should), then we might be better off putting these parameters directly - // into their argument registers, rather than using callee-saved registers and - // preserving them on the stack. - const Register& argv = x21; - const Register& argc = x22; - const Register& target = x23; - - // Derive argv from the stack pointer so that it points to the first argument - // (arg[argc-2]), or just below the receiver in case there are no arguments. - // - Adjust for the arg[] array. - Register temp_argv = x11; - __ Add(temp_argv, jssp, Operand(x0, LSL, kPointerSizeLog2)); - // - Adjust for the receiver. - __ Sub(temp_argv, temp_argv, 1 * kPointerSize); - - // Enter the exit frame. Reserve three slots to preserve x21-x23 callee-saved - // registers. - FrameScope scope(masm, StackFrame::MANUAL); - __ EnterExitFrame(save_doubles_, x10, 3); - ASSERT(csp.Is(__ StackPointer())); - - // Poke callee-saved registers into reserved space. - __ Poke(argv, 1 * kPointerSize); - __ Poke(argc, 2 * kPointerSize); - __ Poke(target, 3 * kPointerSize); - - // We normally only keep tagged values in callee-saved registers, as they - // could be pushed onto the stack by called stubs and functions, and on the - // stack they can confuse the GC. However, we're only calling C functions - // which can push arbitrary data onto the stack anyway, and so the GC won't - // examine that part of the stack. - __ Mov(argc, argc_input); - __ Mov(target, target_input); - __ Mov(argv, temp_argv); - - Label throw_normal; - Label throw_termination; - - // Call the runtime function. - GenerateCore(masm, - &throw_normal, - &throw_termination, - false, - false); - - // If successful, the previous GenerateCore will have returned to the - // calling code. Otherwise, we fall through into the following. - - // Do space-specific GC and retry runtime call. - GenerateCore(masm, - &throw_normal, - &throw_termination, - true, - false); - - // Do full GC and retry runtime call one final time. - __ Mov(x0, reinterpret_cast<uint64_t>(Failure::InternalError())); - GenerateCore(masm, - &throw_normal, - &throw_termination, - true, - true); - - { FrameScope scope(masm, StackFrame::MANUAL); - __ CallCFunction( - ExternalReference::out_of_memory_function(masm->isolate()), 0); - } + Label throw_termination_exception; + __ Cmp(exception, Operand(isolate()->factory()->termination_exception())); + __ B(eq, &throw_termination_exception); // We didn't execute a return case, so the stack frame hasn't been updated // (except for the return address slot). 
However, we don't need to initialize @@ -1766,24 +1616,18 @@ void CEntryStub::Generate(MacroAssembler* masm) { // unwinds the stack. __ SetStackPointer(jssp); - // Throw exceptions. - // If we throw an exception, we can end up re-entering CEntryStub before we - // pop the exit frame, so need to ensure that x21-x23 contain GC-safe values - // here. - - __ Bind(&throw_termination); - ASM_LOCATION("Throw termination"); + ASM_LOCATION("Throw normal"); __ Mov(argv, 0); __ Mov(argc, 0); __ Mov(target, 0); - __ ThrowUncatchable(x0, x10, x11, x12, x13); + __ Throw(x0, x10, x11, x12, x13); - __ Bind(&throw_normal); - ASM_LOCATION("Throw normal"); + __ Bind(&throw_termination_exception); + ASM_LOCATION("Throw termination"); __ Mov(argv, 0); __ Mov(argc, 0); __ Mov(target, 0); - __ Throw(x0, x10, x11, x12, x13); + __ ThrowUncatchable(x0, x10, x11, x12, x13); } @@ -1817,20 +1661,22 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { __ Mov(jssp, csp); __ SetStackPointer(jssp); + // Configure the FPCR. We don't restore it, so this is technically not allowed + // according to AAPCS64. However, we only set default-NaN mode and this will + // be harmless for most C code. Also, it works for ARM. + __ ConfigureFPCR(); + ProfileEntryHookStub::MaybeCallEntryHook(masm); // Set up the reserved register for 0.0. __ Fmov(fp_zero, 0.0); // Build an entry frame (see layout below). - Isolate* isolate = masm->isolate(); - - // Build an entry frame. int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; int64_t bad_frame_pointer = -1L; // Bad frame pointer to fail if it is used. __ Mov(x13, bad_frame_pointer); __ Mov(x12, Smi::FromInt(marker)); - __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate)); + __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate())); __ Ldr(x10, MemOperand(x11)); __ Push(x13, xzr, x12, x10); @@ -1840,7 +1686,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Push the JS entry frame marker. Also set js_entry_sp if this is the // outermost JS call. Label non_outermost_js, done; - ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); + ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate()); __ Mov(x10, ExternalReference(js_entry_sp)); __ Ldr(x11, MemOperand(x10)); __ Cbnz(x11, &non_outermost_js); @@ -1880,10 +1726,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // fp will be invalid because the PushTryHandler below sets it to 0 to // signal the existence of the JSEntry frame. __ Mov(x10, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); + isolate()))); } __ Str(code_entry, MemOperand(x10)); - __ Mov(x0, Operand(reinterpret_cast<int64_t>(Failure::Exception()))); + __ LoadRoot(x0, Heap::kExceptionRootIndex); __ B(&exit); // Invoke: Link this frame into the handler chain. There's only one @@ -1896,9 +1742,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // saved values before returning a failure to C. // Clear any pending exceptions. - __ Mov(x10, Operand(isolate->factory()->the_hole_value())); + __ Mov(x10, Operand(isolate()->factory()->the_hole_value())); __ Mov(x11, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); + isolate()))); __ Str(x10, MemOperand(x11)); // Invoke the function by calling through the JS entry trampoline builtin. @@ -1913,7 +1759,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // x4: argv. 
ExternalReference entry(is_construct ? Builtins::kJSConstructEntryTrampoline : Builtins::kJSEntryTrampoline, - isolate); + isolate()); __ Mov(x10, entry); // Call the JSEntryTrampoline. @@ -1946,7 +1792,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Restore the top frame descriptors from the stack. __ Pop(x10); - __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate)); + __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate())); __ Str(x10, MemOperand(x11)); // Reset the stack to the callee saved registers. @@ -1973,7 +1819,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) { // ----------------------------------- Register key = x0; receiver = x1; - __ Cmp(key, Operand(masm->isolate()->factory()->prototype_string())); + __ Cmp(key, Operand(isolate()->factory()->prototype_string())); __ B(ne, &miss); } else { ASSERT(kind() == Code::LOAD_IC); @@ -2131,7 +1977,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ Mov(result, res_false); // Null is not instance of anything. - __ Cmp(object_type, Operand(masm->isolate()->factory()->null_value())); + __ Cmp(object_type, Operand(isolate()->factory()->null_value())); __ B(ne, &object_not_null); __ Ret(); @@ -2732,11 +2578,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { const int kJSRegExpOffset = 7 * kPointerSize; // Ensure that a RegExp stack is allocated. - Isolate* isolate = masm->isolate(); ExternalReference address_of_regexp_stack_memory_address = - ExternalReference::address_of_regexp_stack_memory_address(isolate); + ExternalReference::address_of_regexp_stack_memory_address(isolate()); ExternalReference address_of_regexp_stack_memory_size = - ExternalReference::address_of_regexp_stack_memory_size(isolate); + ExternalReference::address_of_regexp_stack_memory_size(isolate()); __ Mov(x10, address_of_regexp_stack_memory_size); __ Ldr(x10, MemOperand(x10)); __ Cbz(x10, &runtime); @@ -2898,7 +2743,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { __ JumpIfSmi(code_object, &runtime); // All checks done. Now push arguments for native regexp code. - __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, + __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, x10, x11); @@ -2914,7 +2759,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // csp[0]: Space for the return address placed by DirectCEntryStub. // csp[8]: Argument 9, the current isolate address. - __ Mov(x10, ExternalReference::isolate_address(isolate)); + __ Mov(x10, ExternalReference::isolate_address(isolate())); __ Poke(x10, kPointerSize); Register length = w11; @@ -2963,7 +2808,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { __ Add(x3, x2, Operand(w10, UXTW)); // Argument 5 (x4): static offsets vector buffer. - __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate)); + __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate())); // Argument 6 (x5): Set the number of capture registers to zero to force // global regexps to behave as non-global. This stub is not used for global @@ -2982,7 +2827,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Locate the code entry and call it. 
__ Add(code_object, code_object, Code::kHeaderSize - kHeapObjectTag); - DirectCEntryStub stub; + DirectCEntryStub stub(isolate()); stub.GenerateCall(masm, code_object); __ LeaveExitFrame(false, x10, true); @@ -3068,7 +2913,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Get the static offsets vector filled by the native regexp code // and fill the last match info. ExternalReference address_of_static_offsets_vector = - ExternalReference::address_of_static_offsets_vector(isolate); + ExternalReference::address_of_static_offsets_vector(isolate()); __ Mov(offsets_vector_index, address_of_static_offsets_vector); Label next_capture, done; @@ -3107,10 +2952,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // A stack overflow (on the backtrack stack) may have occured // in the RegExp code but no exception has been created yet. // If there is no pending exception, handle that in the runtime system. - __ Mov(x10, Operand(isolate->factory()->the_hole_value())); + __ Mov(x10, Operand(isolate()->factory()->the_hole_value())); __ Mov(x11, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); + isolate()))); __ Ldr(exception_value, MemOperand(x11)); __ Cmp(x10, exception_value); __ B(eq, &runtime); @@ -3129,7 +2974,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { __ ThrowUncatchable(exception_value, x10, x11, x12, x13); __ Bind(&failure); - __ Mov(x0, Operand(masm->isolate()->factory()->null_value())); + __ Mov(x0, Operand(isolate()->factory()->null_value())); __ PopCPURegList(used_callee_saved_registers); // Drop the 4 arguments of the stub from the stack. __ Drop(4); @@ -3259,7 +3104,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, // slot. { FrameScope scope(masm, StackFrame::INTERNAL); - CreateAllocationSiteStub create_stub; + CreateAllocationSiteStub create_stub(masm->isolate()); // Arguments register must be smi-tagged to call out. __ SmiTag(argc); @@ -3294,14 +3139,64 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, } +static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { + // Do not transform the receiver for strict mode functions. + __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); + __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset)); + __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, cont); + + // Do not transform the receiver for native (Compilerhints already in x3). + __ Tbnz(w4, SharedFunctionInfo::kNative, cont); +} + + +static void EmitSlowCase(MacroAssembler* masm, + int argc, + Register function, + Register type, + Label* non_function) { + // Check for function proxy. + // x10 : function type. + __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, non_function); + __ Push(function); // put proxy as additional argument + __ Mov(x0, argc + 1); + __ Mov(x2, 0); + __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY); + { + Handle<Code> adaptor = + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); + __ Jump(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). + __ Bind(non_function); + __ Poke(function, argc * kXRegSize); + __ Mov(x0, argc); // Set up the number of arguments. 
+ __ Mov(x2, 0); + __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); +} + + +static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { + // Wrap the receiver and patch it back onto the stack. + { FrameScope frame_scope(masm, StackFrame::INTERNAL); + __ Push(x1, x3); + __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); + __ Pop(x1); + } + __ Poke(x0, argc * kPointerSize); + __ B(cont); +} + + void CallFunctionStub::Generate(MacroAssembler* masm) { ASM_LOCATION("CallFunctionStub::Generate"); // x1 function the function to call - // x2 : feedback vector - // x3 : slot in feedback vector (smi) (if x2 is not the megamorphic symbol) + Register function = x1; - Register cache_cell = x2; - Register slot = x3; Register type = x4; Label slow, non_function, wrap, cont; @@ -3314,33 +3209,20 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // Goto slow case if we do not have a function. __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow); - - if (RecordCallTarget()) { - GenerateRecordCallTarget(masm, x0, function, cache_cell, slot, x4, x5); - // Type information was updated. Because we may call Array, which - // expects either undefined or an AllocationSite in ebx we need - // to set ebx to undefined. - __ LoadRoot(cache_cell, Heap::kUndefinedValueRootIndex); - } } // Fast-case: Invoke the function now. // x1 function pushed function - ParameterCount actual(argc_); + int argc = argc_; + ParameterCount actual(argc); if (CallAsMethod()) { if (NeedsChecks()) { - // Do not transform the receiver for strict mode functions. - __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); - __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset)); - __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, &cont); - - // Do not transform the receiver for native (Compilerhints already in x3). - __ Tbnz(w4, SharedFunctionInfo::kNative, &cont); + EmitContinueIfStrictOrNative(masm, &cont); } // Compute the receiver in sloppy mode. - __ Peek(x3, argc_ * kPointerSize); + __ Peek(x3, argc * kPointerSize); if (NeedsChecks()) { __ JumpIfSmi(x3, &wrap); @@ -3351,6 +3233,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { __ Bind(&cont); } + __ InvokeFunction(function, actual, JUMP_FUNCTION, @@ -3359,51 +3242,12 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { if (NeedsChecks()) { // Slow-case: Non-function called. __ Bind(&slow); - if (RecordCallTarget()) { - // If there is a call target cache, mark it megamorphic in the - // non-function case. MegamorphicSentinel is an immortal immovable object - // (megamorphic symbol) so no write barrier is needed. - ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), - masm->isolate()->heap()->megamorphic_symbol()); - __ Add(x12, cache_cell, Operand::UntagSmiAndScale(slot, - kPointerSizeLog2)); - __ LoadRoot(x11, Heap::kMegamorphicSymbolRootIndex); - __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize)); - } - // Check for function proxy. - // x10 : function type. 
- __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, &non_function); - __ Push(function); // put proxy as additional argument - __ Mov(x0, argc_ + 1); - __ Mov(x2, 0); - __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY); - { - Handle<Code> adaptor = - masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); - __ Jump(adaptor, RelocInfo::CODE_TARGET); - } - - // CALL_NON_FUNCTION expects the non-function callee as receiver (instead - // of the original receiver from the call site). - __ Bind(&non_function); - __ Poke(function, argc_ * kXRegSize); - __ Mov(x0, argc_); // Set up the number of arguments. - __ Mov(x2, 0); - __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION); - __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), - RelocInfo::CODE_TARGET); + EmitSlowCase(masm, argc, function, type, &non_function); } if (CallAsMethod()) { __ Bind(&wrap); - // Wrap the receiver and patch it back onto the stack. - { FrameScope frame_scope(masm, StackFrame::INTERNAL); - __ Push(x1, x3); - __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); - __ Pop(x1); - } - __ Poke(x0, argc_ * kPointerSize); - __ B(&cont); + EmitWrapCase(masm, argc, &cont); } } @@ -3473,11 +3317,126 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ Bind(&do_call); // Set expected number of arguments to zero (not changing x0). __ Mov(x2, 0); - __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), RelocInfo::CODE_TARGET); } +static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { + __ Ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); + __ Ldr(vector, FieldMemOperand(vector, + JSFunction::kSharedFunctionInfoOffset)); + __ Ldr(vector, FieldMemOperand(vector, + SharedFunctionInfo::kFeedbackVectorOffset)); +} + + +void CallICStub::Generate(MacroAssembler* masm) { + ASM_LOCATION("CallICStub"); + + // x1 - function + // x3 - slot id (Smi) + Label extra_checks_or_miss, slow_start; + Label slow, non_function, wrap, cont; + Label have_js_function; + int argc = state_.arg_count(); + ParameterCount actual(argc); + + Register function = x1; + Register feedback_vector = x2; + Register index = x3; + Register type = x4; + + EmitLoadTypeFeedbackVector(masm, feedback_vector); + + // The checks. First, does x1 match the recorded monomorphic target? + __ Add(x4, feedback_vector, + Operand::UntagSmiAndScale(index, kPointerSizeLog2)); + __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); + + __ Cmp(x4, function); + __ B(ne, &extra_checks_or_miss); + + __ bind(&have_js_function); + if (state_.CallAsMethod()) { + EmitContinueIfStrictOrNative(masm, &cont); + + // Compute the receiver in sloppy mode. + __ Peek(x3, argc * kPointerSize); + + __ JumpIfSmi(x3, &wrap); + __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt); + + __ Bind(&cont); + } + + __ InvokeFunction(function, + actual, + JUMP_FUNCTION, + NullCallWrapper()); + + __ bind(&slow); + EmitSlowCase(masm, argc, function, type, &non_function); + + if (state_.CallAsMethod()) { + __ bind(&wrap); + EmitWrapCase(masm, argc, &cont); + } + + __ bind(&extra_checks_or_miss); + Label miss; + + __ JumpIfRoot(x4, Heap::kMegamorphicSymbolRootIndex, &slow_start); + __ JumpIfRoot(x4, Heap::kUninitializedSymbolRootIndex, &miss); + + if (!FLAG_trace_ic) { + // We are going megamorphic, and we don't want to visit the runtime. 
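
// A minimal illustrative sketch of the feedback-state handling above, with
// hypothetical types (this is not V8's feedback-vector format): the fast path
// compares the recorded slot against the callee, an uninitialized slot goes
// to the miss handler so the target can be recorded, and a mismatch degrades
// the slot to the megamorphic sentinel so later calls take the generic path.
#include <cstdint>
namespace sketch {
enum class Feedback : uint8_t { kUninitialized, kMonomorphic, kMegamorphic };

struct CallSite {
  Feedback state = Feedback::kUninitialized;
  const void* target = nullptr;  // callee recorded for the monomorphic case
};

// Returns true when the monomorphic fast path may be taken.
inline bool UpdateOnCall(CallSite* site, const void* callee) {
  switch (site->state) {
    case Feedback::kMonomorphic:
      if (site->target == callee) return true;  // recorded target: fast path
      site->state = Feedback::kMegamorphic;     // mismatch: go megamorphic
      return false;
    case Feedback::kUninitialized:
      site->state = Feedback::kMonomorphic;     // the miss handler records this
      site->target = callee;
      return false;
    case Feedback::kMegamorphic:
    default:
      return false;                             // generic slow path
  }
}
}  // namespace sketch
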
+ __ Add(x4, feedback_vector, + Operand::UntagSmiAndScale(index, kPointerSizeLog2)); + __ LoadRoot(x5, Heap::kMegamorphicSymbolRootIndex); + __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize)); + __ B(&slow_start); + } + + // We are here because tracing is on or we are going monomorphic. + __ bind(&miss); + GenerateMiss(masm); + + // the slow case + __ bind(&slow_start); + + // Check that the function is really a JavaScript function. + __ JumpIfSmi(function, &non_function); + + // Goto slow case if we do not have a function. + __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow); + __ B(&have_js_function); +} + + +void CallICStub::GenerateMiss(MacroAssembler* masm) { + ASM_LOCATION("CallICStub[Miss]"); + + // Get the receiver of the function from the stack; 1 ~ return address. + __ Peek(x4, (state_.arg_count() + 1) * kPointerSize); + + { + FrameScope scope(masm, StackFrame::INTERNAL); + + // Push the receiver and the function and feedback info. + __ Push(x4, x1, x2, x3); + + // Call the entry. + ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), + masm->isolate()); + __ CallExternalReference(miss, 4); + + // Move result to edi and exit the internal frame. + __ Mov(x1, x0); + } +} + + void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { // If the receiver is a smi trigger the non-string case. __ JumpIfSmi(object_, receiver_not_string_); @@ -3663,9 +3622,9 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { __ Ret(); __ Bind(&unordered); - ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, + ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC, CompareIC::GENERIC); - __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); + __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); __ Bind(&maybe_undefined1); if (Token::IsOrderedRelationalCompareOp(op_)) { @@ -3905,7 +3864,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) { Register stub_entry = x11; { ExternalReference miss = - ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); + ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate()); FrameScope scope(masm, StackFrame::INTERNAL); Register op = x10; @@ -4219,7 +4178,7 @@ void SubStringStub::Generate(MacroAssembler* masm) { __ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong); __ Bind(&return_x0); - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); __ IncrementCounter(counters->sub_string_native(), 1, x3, x4); __ Drop(3); __ Ret(); @@ -4366,7 +4325,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop( void StringCompareStub::Generate(MacroAssembler* masm) { Label runtime; - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); // Stack frame on entry. // sp[0]: right string @@ -4406,227 +4365,17 @@ void StringCompareStub::Generate(MacroAssembler* masm) { } -void ArrayPushStub::Generate(MacroAssembler* masm) { - Register receiver = x0; - - int argc = arguments_count(); - - if (argc == 0) { - // Nothing to do, just return the length. 
- __ Ldr(x0, FieldMemOperand(receiver, JSArray::kLengthOffset)); - __ Drop(argc + 1); - __ Ret(); - return; - } - - Isolate* isolate = masm->isolate(); - - if (argc != 1) { - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - Label call_builtin, attempt_to_grow_elements, with_write_barrier; - - Register elements_length = x8; - Register length = x7; - Register elements = x6; - Register end_elements = x5; - Register value = x4; - // Get the elements array of the object. - __ Ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); - - if (IsFastSmiOrObjectElementsKind(elements_kind())) { - // Check that the elements are in fast mode and writable. - __ CheckMap(elements, - x10, - Heap::kFixedArrayMapRootIndex, - &call_builtin, - DONT_DO_SMI_CHECK); - } - - // Get the array's length and calculate new length. - __ Ldr(length, FieldMemOperand(receiver, JSArray::kLengthOffset)); - STATIC_ASSERT(kSmiTag == 0); - __ Add(length, length, Smi::FromInt(argc)); - - // Check if we could survive without allocation. - __ Ldr(elements_length, - FieldMemOperand(elements, FixedArray::kLengthOffset)); - __ Cmp(length, elements_length); - - const int kEndElementsOffset = - FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; - - if (IsFastSmiOrObjectElementsKind(elements_kind())) { - __ B(gt, &attempt_to_grow_elements); - - // Check if value is a smi. - __ Peek(value, (argc - 1) * kPointerSize); - __ JumpIfNotSmi(value, &with_write_barrier); - - // Store the value. - // We may need a register containing the address end_elements below, - // so write back the value in end_elements. - __ Add(end_elements, elements, - Operand::UntagSmiAndScale(length, kPointerSizeLog2)); - __ Str(value, MemOperand(end_elements, kEndElementsOffset, PreIndex)); - } else { - __ B(gt, &call_builtin); - - __ Peek(value, (argc - 1) * kPointerSize); - __ StoreNumberToDoubleElements(value, length, elements, x10, d0, d1, - &call_builtin, argc * kDoubleSize); - } - - // Save new length. - __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset)); - - // Return length. - __ Drop(argc + 1); - __ Mov(x0, length); - __ Ret(); - - if (IsFastDoubleElementsKind(elements_kind())) { - __ Bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - __ Bind(&with_write_barrier); - - if (IsFastSmiElementsKind(elements_kind())) { - if (FLAG_trace_elements_transitions) { - __ B(&call_builtin); - } - - __ Ldr(x10, FieldMemOperand(value, HeapObject::kMapOffset)); - __ JumpIfHeapNumber(x10, &call_builtin); - - ElementsKind target_kind = IsHoleyElementsKind(elements_kind()) - ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; - __ Ldr(x10, GlobalObjectMemOperand()); - __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kNativeContextOffset)); - __ Ldr(x10, ContextMemOperand(x10, Context::JS_ARRAY_MAPS_INDEX)); - const int header_size = FixedArrayBase::kHeaderSize; - // Verify that the object can be transitioned in place. 
- const int origin_offset = header_size + elements_kind() * kPointerSize; - __ ldr(x11, FieldMemOperand(receiver, origin_offset)); - __ ldr(x12, FieldMemOperand(x10, HeapObject::kMapOffset)); - __ cmp(x11, x12); - __ B(ne, &call_builtin); - - const int target_offset = header_size + target_kind * kPointerSize; - __ Ldr(x10, FieldMemOperand(x10, target_offset)); - __ Mov(x11, receiver); - ElementsTransitionGenerator::GenerateMapChangeElementsTransition( - masm, DONT_TRACK_ALLOCATION_SITE, NULL); - } - - // Save new length. - __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset)); - - // Store the value. - // We may need a register containing the address end_elements below, - // so write back the value in end_elements. - __ Add(end_elements, elements, - Operand::UntagSmiAndScale(length, kPointerSizeLog2)); - __ Str(value, MemOperand(end_elements, kEndElementsOffset, PreIndex)); - - __ RecordWrite(elements, - end_elements, - value, - kLRHasNotBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - __ Drop(argc + 1); - __ Mov(x0, length); - __ Ret(); - - __ Bind(&attempt_to_grow_elements); - - if (!FLAG_inline_new) { - __ B(&call_builtin); - } - - Register argument = x2; - __ Peek(argument, (argc - 1) * kPointerSize); - // Growing elements that are SMI-only requires special handling in case - // the new element is non-Smi. For now, delegate to the builtin. - if (IsFastSmiElementsKind(elements_kind())) { - __ JumpIfNotSmi(argument, &call_builtin); - } - - // We could be lucky and the elements array could be at the top of new-space. - // In this case we can just grow it in place by moving the allocation pointer - // up. - ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(isolate); - ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(isolate); - - const int kAllocationDelta = 4; - ASSERT(kAllocationDelta >= argc); - Register allocation_top_addr = x5; - Register allocation_top = x9; - // Load top and check if it is the end of elements. - __ Add(end_elements, elements, - Operand::UntagSmiAndScale(length, kPointerSizeLog2)); - __ Add(end_elements, end_elements, kEndElementsOffset); - __ Mov(allocation_top_addr, new_space_allocation_top); - __ Ldr(allocation_top, MemOperand(allocation_top_addr)); - __ Cmp(end_elements, allocation_top); - __ B(ne, &call_builtin); - - __ Mov(x10, new_space_allocation_limit); - __ Ldr(x10, MemOperand(x10)); - __ Add(allocation_top, allocation_top, kAllocationDelta * kPointerSize); - __ Cmp(allocation_top, x10); - __ B(hi, &call_builtin); - - // We fit and could grow elements. - // Update new_space_allocation_top. - __ Str(allocation_top, MemOperand(allocation_top_addr)); - // Push the argument. - __ Str(argument, MemOperand(end_elements)); - // Fill the rest with holes. - __ LoadRoot(x10, Heap::kTheHoleValueRootIndex); - ASSERT(kAllocationDelta == 4); - __ Stp(x10, x10, MemOperand(end_elements, 1 * kPointerSize)); - __ Stp(x10, x10, MemOperand(end_elements, 3 * kPointerSize)); - - // Update elements' and array's sizes. - __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset)); - __ Add(elements_length, elements_length, Smi::FromInt(kAllocationDelta)); - __ Str(elements_length, - FieldMemOperand(elements, FixedArray::kLengthOffset)); - - // Elements are in new space, so write barrier is not required. 
- __ Drop(argc + 1); - __ Mov(x0, length); - __ Ret(); - - __ Bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); -} - - void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- x1 : left // -- x0 : right // -- lr : return address // ----------------------------------- - Isolate* isolate = masm->isolate(); // Load x2 with the allocation site. We stick an undefined dummy value here // and replace it with the real allocation site later when we instantiate this // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). - __ LoadObject(x2, handle(isolate->heap()->undefined_value())); + __ LoadObject(x2, handle(isolate()->heap()->undefined_value())); // Make sure that we actually patched the allocation site. if (FLAG_debug_code) { @@ -4638,7 +4387,7 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { // Tail call into the stub that handles binary operations with allocation // sites. - BinaryOpWithAllocationSiteStub stub(state_); + BinaryOpWithAllocationSiteStub stub(isolate(), state_); __ TailCallStub(&stub); } @@ -4699,12 +4448,12 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { __ Mov(address, regs_.address()); __ Mov(x0, regs_.object()); __ Mov(x1, address); - __ Mov(x2, ExternalReference::isolate_address(masm->isolate())); + __ Mov(x2, ExternalReference::isolate_address(isolate())); AllowExternalCallThatCantCauseGC scope(masm); ExternalReference function = ExternalReference::incremental_marking_record_write_function( - masm->isolate()); + isolate()); __ CallCFunction(function, 3, 0); regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); @@ -4890,15 +4639,15 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { __ Bind(&double_elements); __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset)); - __ StoreNumberToDoubleElements(value, index_smi, x10, x11, d0, d1, + __ StoreNumberToDoubleElements(value, index_smi, x10, x11, d0, &slow_elements); __ Ret(); } void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { - CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); - __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); + CEntryStub ces(isolate(), 1, fp_registers_ ? 
kSaveFPRegs : kDontSaveFPRegs); + __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); int parameter_count_offset = StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; __ Ldr(x1, MemOperand(fp, parameter_count_offset)); @@ -4920,7 +4669,7 @@ static const unsigned int kProfileEntryHookCallSize = void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { if (masm->isolate()->function_entry_hook() != NULL) { - ProfileEntryHookStub stub; + ProfileEntryHookStub stub(masm->isolate()); Assembler::BlockConstPoolScope no_const_pools(masm); Label entry_hook_call_start; __ Bind(&entry_hook_call_start); @@ -4949,7 +4698,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) { #if V8_HOST_ARCH_ARM64 uintptr_t entry_hook = - reinterpret_cast<uintptr_t>(masm->isolate()->function_entry_hook()); + reinterpret_cast<uintptr_t>(isolate()->function_entry_hook()); __ Mov(x10, entry_hook); #else // Under the simulator we need to indirect the entry hook through a trampoline @@ -4957,9 +4706,9 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) { ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline)); __ Mov(x10, Operand(ExternalReference(&dispatcher, ExternalReference::BUILTIN_CALL, - masm->isolate()))); + isolate()))); // It additionally takes an isolate as a third parameter - __ Mov(x2, ExternalReference::isolate_address(masm->isolate())); + __ Mov(x2, ExternalReference::isolate_address(isolate())); #endif // The caller's return address is above the saved temporaries. @@ -4992,6 +4741,7 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) { __ Blr(x10); // Return to calling code. __ Peek(lr, 0); + __ AssertFPCRState(); __ Ret(); __ SetStackPointer(old_stack_pointer); @@ -5004,7 +4754,7 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm, ASSERT(csp.Is(__ StackPointer())); intptr_t code = - reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location()); + reinterpret_cast<intptr_t>(GetCode().location()); __ Mov(lr, Operand(code, RelocInfo::CODE_TARGET)); __ Mov(x10, target); // Branch to the stub. @@ -5083,7 +4833,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup( } Label not_found; - NameDictionaryLookupStub stub(POSITIVE_LOOKUP); + NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP); __ CallStub(&stub); __ Cbz(x0, ¬_found); __ Mov(scratch2, x2); // Move entry index into scratch2. @@ -5154,7 +4904,7 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, __ Ldr(x0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); __ Mov(x1, Operand(name)); - NameDictionaryLookupStub stub(NEGATIVE_LOOKUP); + NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP); __ CallStub(&stub); // Move stub return value to scratch0. Note that scratch0 is not included in // spill_list and won't be clobbered by PopCPURegList. @@ -5257,7 +5007,7 @@ static void CreateArrayDispatch(MacroAssembler* masm, AllocationSiteOverrideMode mode) { ASM_LOCATION("CreateArrayDispatch"); if (mode == DISABLE_ALLOCATION_SITES) { - T stub(GetInitialFastElementsKind(), mode); + T stub(masm->isolate(), GetInitialFastElementsKind(), mode); __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { @@ -5270,7 +5020,7 @@ static void CreateArrayDispatch(MacroAssembler* masm, // TODO(jbramley): Is this the best way to handle this? Can we make the // tail calls conditional, rather than hopping over each one? 
__ CompareAndBranch(kind, candidate_kind, ne, &next); - T stub(candidate_kind); + T stub(masm->isolate(), candidate_kind); __ TailCallStub(&stub); __ Bind(&next); } @@ -5320,12 +5070,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, ElementsKind initial = GetInitialFastElementsKind(); ElementsKind holey_initial = GetHoleyElementsKind(initial); - ArraySingleArgumentConstructorStub stub_holey(holey_initial, + ArraySingleArgumentConstructorStub stub_holey(masm->isolate(), + holey_initial, DISABLE_ALLOCATION_SITES); __ TailCallStub(&stub_holey); __ Bind(&normal_sequence); - ArraySingleArgumentConstructorStub stub(initial, + ArraySingleArgumentConstructorStub stub(masm->isolate(), + initial, DISABLE_ALLOCATION_SITES); __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { @@ -5357,7 +5109,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, Label next; ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); __ CompareAndBranch(kind, candidate_kind, ne, &next); - ArraySingleArgumentConstructorStub stub(candidate_kind); + ArraySingleArgumentConstructorStub stub(masm->isolate(), candidate_kind); __ TailCallStub(&stub); __ Bind(&next); } @@ -5376,11 +5128,11 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { TERMINAL_FAST_ELEMENTS_KIND); for (int i = 0; i <= to_index; ++i) { ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); - T stub(kind); - stub.GetCode(isolate); + T stub(isolate, kind); + stub.GetCode(); if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { - T stub1(kind, DISABLE_ALLOCATION_SITES); - stub1.GetCode(isolate); + T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); + stub1.GetCode(); } } } @@ -5401,12 +5153,12 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; for (int i = 0; i < 2; i++) { // For internal arrays we only need a few things - InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); - stubh1.GetCode(isolate); - InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); - stubh2.GetCode(isolate); - InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); - stubh3.GetCode(isolate); + InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); + stubh1.GetCode(); + InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); + stubh2.GetCode(); + InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]); + stubh3.GetCode(); } } @@ -5509,22 +5261,22 @@ void InternalArrayConstructorStub::GenerateCase( __ Cbz(x10, &packed_case); InternalArraySingleArgumentConstructorStub - stub1_holey(GetHoleyElementsKind(kind)); + stub1_holey(isolate(), GetHoleyElementsKind(kind)); __ TailCallStub(&stub1_holey); __ Bind(&packed_case); } - InternalArraySingleArgumentConstructorStub stub1(kind); + InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); __ TailCallStub(&stub1); __ Bind(&zero_case); // No arguments. - InternalArrayNoArgumentConstructorStub stub0(kind); + InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); __ TailCallStub(&stub0); __ Bind(&n_case); // N arguments. 
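
// A simplified illustrative model (hypothetical class, not the real CodeStub
// hierarchy): the change repeated throughout this patch, T stub(kind) becoming
// T stub(isolate, kind) and stub.GetCode(isolate) becoming stub.GetCode(),
// follows from the stub base class now storing the Isolate* it was built for.
class Isolate;  // opaque in this sketch

class CodeStubSketch {  // hypothetical stand-in for PlatformCodeStub
 public:
  explicit CodeStubSketch(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() const { return isolate_; }
  // Before the patch: Handle<Code> GetCode(Isolate* isolate);
  // After the patch:  Handle<Code> GetCode();  // uses the stored isolate_
 private:
  Isolate* isolate_;
};
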
- InternalArrayNArgumentsConstructorStub stubN(kind); + InternalArrayNArgumentsConstructorStub stubN(isolate(), kind); __ TailCallStub(&stubN); } @@ -5536,8 +5288,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { // -- sp[0] : return address // -- sp[4] : last argument // ----------------------------------- - Handle<Object> undefined_sentinel( - masm->isolate()->heap()->undefined_value(), masm->isolate()); Register constructor = x1; @@ -5616,8 +5366,6 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { STATIC_ASSERT(FCA::kHolderIndex == 0); STATIC_ASSERT(FCA::kArgsLength == 7); - Isolate* isolate = masm->isolate(); - // FunctionCallbackArguments: context, callee and call data. __ Push(context, callee, call_data); @@ -5628,7 +5376,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); } Register isolate_reg = x5; - __ Mov(isolate_reg, ExternalReference::isolate_address(isolate)); + __ Mov(isolate_reg, ExternalReference::isolate_address(isolate())); // FunctionCallbackArguments: // return value, return value default, isolate, holder. @@ -5662,11 +5410,8 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { __ Stp(x10, xzr, MemOperand(x0, 2 * kPointerSize)); const int kStackUnwindSpace = argc + FCA::kArgsLength + 1; - Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); - ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL; - ApiFunction thunk_fun(thunk_address); - ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, - masm->isolate()); + ExternalReference thunk_ref = + ExternalReference::invoke_function_callback(isolate()); AllowExternalCallThatCantCauseGC scope(masm); MemOperand context_restore_operand( @@ -5719,12 +5464,8 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) { const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1; - Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); - ExternalReference::Type thunk_type = - ExternalReference::PROFILING_GETTER_CALL; - ApiFunction thunk_fun(thunk_address); - ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, - masm->isolate()); + ExternalReference thunk_ref = + ExternalReference::invoke_accessor_getter_callback(isolate()); const int spill_offset = 1 + kApiStackSpace; __ CallApiFunctionAndReturn(api_function_address, diff --git a/deps/v8/src/arm64/code-stubs-arm64.h b/deps/v8/src/arm64/code-stubs-arm64.h index 7e09ffa57..a92445c47 100644 --- a/deps/v8/src/arm64/code-stubs-arm64.h +++ b/deps/v8/src/arm64/code-stubs-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_CODE_STUBS_ARM64_H_ #define V8_ARM64_CODE_STUBS_ARM64_H_ @@ -39,8 +16,8 @@ void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code); class StoreBufferOverflowStub: public PlatformCodeStub { public: - explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp) - : save_doubles_(save_fp) { } + StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp) + : PlatformCodeStub(isolate), save_doubles_(save_fp) { } void Generate(MacroAssembler* masm); @@ -79,8 +56,8 @@ class StringHelper : public AllStatic { class StoreRegistersStateStub: public PlatformCodeStub { public: - explicit StoreRegistersStateStub(SaveFPRegsMode with_fp) - : save_doubles_(with_fp) {} + StoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp) + : PlatformCodeStub(isolate), save_doubles_(with_fp) {} static Register to_be_pushed_lr() { return ip0; } static void GenerateAheadOfTime(Isolate* isolate); @@ -95,8 +72,8 @@ class StoreRegistersStateStub: public PlatformCodeStub { class RestoreRegistersStateStub: public PlatformCodeStub { public: - explicit RestoreRegistersStateStub(SaveFPRegsMode with_fp) - : save_doubles_(with_fp) {} + RestoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp) + : PlatformCodeStub(isolate), save_doubles_(with_fp) {} static void GenerateAheadOfTime(Isolate* isolate); private: @@ -113,12 +90,14 @@ class RecordWriteStub: public PlatformCodeStub { // Stub to record the write of 'value' at 'address' in 'object'. // Typically 'address' = 'object' + <some offset>. // See MacroAssembler::RecordWriteField() for example. - RecordWriteStub(Register object, + RecordWriteStub(Isolate* isolate, + Register object, Register value, Register address, RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) - : object_(object), + : PlatformCodeStub(isolate), + object_(object), value_(value), address_(address), remembered_set_action_(remembered_set_action), @@ -210,9 +189,15 @@ class RecordWriteStub: public PlatformCodeStub { : object_(object), address_(address), scratch0_(scratch), - saved_regs_(kCallerSaved) { + saved_regs_(kCallerSaved), + saved_fp_regs_(kCallerSavedFP) { ASSERT(!AreAliased(scratch, object, address)); + // The SaveCallerSaveRegisters method needs to save caller-saved + // registers, but we don't bother saving MacroAssembler scratch registers. + saved_regs_.Remove(MacroAssembler::DefaultTmpList()); + saved_fp_regs_.Remove(MacroAssembler::DefaultFPTmpList()); + // We would like to require more scratch registers for this stub, // but the number of registers comes down to the ones used in // FullCodeGen::SetVar(), which is architecture independent. 
@@ -223,12 +208,6 @@ class RecordWriteStub: public PlatformCodeStub { scratch1_ = Register(pool_available.PopLowestIndex()); scratch2_ = Register(pool_available.PopLowestIndex()); - // SaveCallerRegisters method needs to save caller saved register, however - // we don't bother saving ip0 and ip1 because they are used as scratch - // registers by the MacroAssembler. - saved_regs_.Remove(ip0); - saved_regs_.Remove(ip1); - // The scratch registers will be restored by other means so we don't need // to save them with the other caller saved registers. saved_regs_.Remove(scratch0_); @@ -253,7 +232,7 @@ class RecordWriteStub: public PlatformCodeStub { // register will need to be preserved. Can we improve this? masm->PushCPURegList(saved_regs_); if (mode == kSaveFPRegs) { - masm->PushCPURegList(kCallerSavedFP); + masm->PushCPURegList(saved_fp_regs_); } } @@ -261,7 +240,7 @@ class RecordWriteStub: public PlatformCodeStub { // TODO(all): This can be very expensive, and it is likely that not every // register will need to be preserved. Can we improve this? if (mode == kSaveFPRegs) { - masm->PopCPURegList(kCallerSavedFP); + masm->PopCPURegList(saved_fp_regs_); } masm->PopCPURegList(saved_regs_); } @@ -279,6 +258,7 @@ class RecordWriteStub: public PlatformCodeStub { Register scratch1_; Register scratch2_; CPURegList saved_regs_; + CPURegList saved_fp_regs_; // TODO(all): We should consider moving this somewhere else. static CPURegList GetValidRegistersForAllocation() { @@ -296,10 +276,7 @@ class RecordWriteStub: public PlatformCodeStub { CPURegList list(CPURegister::kRegister, kXRegSizeInBits, 0, 25); // We also remove MacroAssembler's scratch registers. - list.Remove(ip0); - list.Remove(ip1); - list.Remove(x8); - list.Remove(x9); + list.Remove(MacroAssembler::DefaultTmpList()); return list; } @@ -372,7 +349,7 @@ class RecordWriteStub: public PlatformCodeStub { // the exit frame before doing the call with GenerateCall. class DirectCEntryStub: public PlatformCodeStub { public: - DirectCEntryStub() {} + explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {} void Generate(MacroAssembler* masm); void GenerateCall(MacroAssembler* masm, Register target); @@ -388,7 +365,8 @@ class NameDictionaryLookupStub: public PlatformCodeStub { public: enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP }; - explicit NameDictionaryLookupStub(LookupMode mode) : mode_(mode) { } + NameDictionaryLookupStub(Isolate* isolate, LookupMode mode) + : PlatformCodeStub(isolate), mode_(mode) { } void Generate(MacroAssembler* masm); @@ -436,7 +414,7 @@ class NameDictionaryLookupStub: public PlatformCodeStub { class SubStringStub: public PlatformCodeStub { public: - SubStringStub() {} + explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {} private: Major MajorKey() { return SubString; } @@ -448,7 +426,7 @@ class SubStringStub: public PlatformCodeStub { class StringCompareStub: public PlatformCodeStub { public: - StringCompareStub() { } + explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { } // Compares two flat ASCII strings and returns result in x0. static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm, diff --git a/deps/v8/src/arm64/codegen-arm64.cc b/deps/v8/src/arm64/codegen-arm64.cc index 831d44986..ff06eda86 100644 --- a/deps/v8/src/arm64/codegen-arm64.cc +++ b/deps/v8/src/arm64/codegen-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -339,8 +316,8 @@ void ElementsTransitionGenerator::GenerateDoubleToObject( // Non-hole double, copy value into a heap number. Register heap_num = x5; - __ AllocateHeapNumber(heap_num, &gc_required, x6, x4, heap_num_map); - __ Str(x13, FieldMemOperand(heap_num, HeapNumber::kValueOffset)); + __ AllocateHeapNumber(heap_num, &gc_required, x6, x4, + x13, heap_num_map); __ Mov(x13, dst_elements); __ Str(heap_num, MemOperand(dst_elements, kPointerSize, PostIndex)); __ RecordWrite(array, x13, heap_num, kLRHasBeenSaved, kDontSaveFPRegs, @@ -373,14 +350,41 @@ void ElementsTransitionGenerator::GenerateDoubleToObject( } -bool Code::IsYoungSequence(byte* sequence) { - return MacroAssembler::IsYoungSequence(sequence); +CodeAgingHelper::CodeAgingHelper() { + ASSERT(young_sequence_.length() == kNoCodeAgeSequenceLength); + // The sequence of instructions that is patched out for aging code is the + // following boilerplate stack-building prologue that is found both in + // FUNCTION and OPTIMIZED_FUNCTION code: + PatchingAssembler patcher(young_sequence_.start(), + young_sequence_.length() / kInstructionSize); + // The young sequence is the frame setup code for FUNCTION code types. It is + // generated by FullCodeGenerator::Generate. 
+ MacroAssembler::EmitFrameSetupForCodeAgePatching(&patcher); + +#ifdef DEBUG + const int length = kCodeAgeStubEntryOffset / kInstructionSize; + ASSERT(old_sequence_.length() >= kCodeAgeStubEntryOffset); + PatchingAssembler patcher_old(old_sequence_.start(), length); + MacroAssembler::EmitCodeAgeSequence(&patcher_old, NULL); +#endif +} + + +#ifdef DEBUG +bool CodeAgingHelper::IsOld(byte* candidate) const { + return memcmp(candidate, old_sequence_.start(), kCodeAgeStubEntryOffset) == 0; +} +#endif + + +bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) { + return MacroAssembler::IsYoungSequence(isolate, sequence); } -void Code::GetCodeAgeAndParity(byte* sequence, Age* age, +void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age, MarkingParity* parity) { - if (IsYoungSequence(sequence)) { + if (IsYoungSequence(isolate, sequence)) { *age = kNoAgeCodeAge; *parity = NO_MARKING_PARITY; } else { @@ -395,7 +399,8 @@ void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence, Code::Age age, MarkingParity parity) { - PatchingAssembler patcher(sequence, kCodeAgeSequenceSize / kInstructionSize); + PatchingAssembler patcher(sequence, + kNoCodeAgeSequenceLength / kInstructionSize); if (age == kNoAgeCodeAge) { MacroAssembler::EmitFrameSetupForCodeAgePatching(&patcher); } else { diff --git a/deps/v8/src/arm64/codegen-arm64.h b/deps/v8/src/arm64/codegen-arm64.h index 4d8a9a85a..bb42bf8d3 100644 --- a/deps/v8/src/arm64/codegen-arm64.h +++ b/deps/v8/src/arm64/codegen-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_CODEGEN_ARM64_H_ #define V8_ARM64_CODEGEN_ARM64_H_ diff --git a/deps/v8/src/arm64/constants-arm64.h b/deps/v8/src/arm64/constants-arm64.h index 8866e23cf..7ee22760d 100644 --- a/deps/v8/src/arm64/constants-arm64.h +++ b/deps/v8/src/arm64/constants-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. 
All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_CONSTANTS_ARM64_H_ #define V8_ARM64_CONSTANTS_ARM64_H_ @@ -112,6 +89,8 @@ const unsigned kZeroRegCode = 31; const unsigned kJSSPCode = 28; const unsigned kSPRegInternalCode = 63; const unsigned kRegCodeMask = 0x1f; +const unsigned kShiftAmountWRegMask = 0x1f; +const unsigned kShiftAmountXRegMask = 0x3f; // Standard machine types defined by AAPCS64. const unsigned kByteSize = 8; const unsigned kByteSizeInBytes = kByteSize >> 3; @@ -130,6 +109,7 @@ const unsigned kQuadWordSizeInBytes = kQuadWordSize >> 3; // AArch64 floating-point specifics. These match IEEE-754. const unsigned kDoubleMantissaBits = 52; const unsigned kDoubleExponentBits = 11; +const unsigned kDoubleExponentBias = 1023; const unsigned kFloatMantissaBits = 23; const unsigned kFloatExponentBits = 8; @@ -262,8 +242,8 @@ const int ImmPCRel_mask = ImmPCRelLo_mask | ImmPCRelHi_mask; enum Condition { eq = 0, ne = 1, - hs = 2, - lo = 3, + hs = 2, cs = hs, + lo = 3, cc = lo, mi = 4, pl = 5, vs = 6, diff --git a/deps/v8/src/arm64/cpu-arm64.cc b/deps/v8/src/arm64/cpu-arm64.cc index b8899adb3..0e1ed91be 100644 --- a/deps/v8/src/arm64/cpu-arm64.cc +++ b/deps/v8/src/arm64/cpu-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // CPU specific code for arm independent of OS goes here. @@ -41,28 +18,36 @@ namespace internal { bool CpuFeatures::initialized_ = false; #endif unsigned CpuFeatures::supported_ = 0; -unsigned CpuFeatures::found_by_runtime_probing_only_ = 0; unsigned CpuFeatures::cross_compile_ = 0; -// Initialise to smallest possible cache size. -unsigned CpuFeatures::dcache_line_size_ = 1; -unsigned CpuFeatures::icache_line_size_ = 1; +class CacheLineSizes { + public: + CacheLineSizes() { +#ifdef USE_SIMULATOR + cache_type_register_ = 0; +#else + // Copy the content of the cache type register to a core register. + __asm__ __volatile__ ("mrs %[ctr], ctr_el0" // NOLINT + : [ctr] "=r" (cache_type_register_)); +#endif + }; -void CPU::SetUp() { - CpuFeatures::Probe(); -} + uint32_t icache_line_size() const { return ExtractCacheLineSize(0); } + uint32_t dcache_line_size() const { return ExtractCacheLineSize(16); } + private: + uint32_t ExtractCacheLineSize(int cache_line_size_shift) const { + // The cache type register holds the size of the caches as a power of two. + return 1 << ((cache_type_register_ >> cache_line_size_shift) & 0xf); + } -bool CPU::SupportsCrankshaft() { - return true; -} + uint32_t cache_type_register_; +}; void CPU::FlushICache(void* address, size_t length) { - if (length == 0) { - return; - } + if (length == 0) return; #ifdef USE_SIMULATOR // TODO(all): consider doing some cache simulation to ensure every address @@ -76,8 +61,9 @@ void CPU::FlushICache(void* address, size_t length) { uintptr_t start = reinterpret_cast<uintptr_t>(address); // Sizes will be used to generate a mask big enough to cover a pointer. - uintptr_t dsize = static_cast<uintptr_t>(CpuFeatures::dcache_line_size()); - uintptr_t isize = static_cast<uintptr_t>(CpuFeatures::icache_line_size()); + CacheLineSizes sizes; + uintptr_t dsize = sizes.dcache_line_size(); + uintptr_t isize = sizes.icache_line_size(); // Cache line sizes are always a power of 2. ASSERT(CountSetBits(dsize, 64) == 1); ASSERT(CountSetBits(isize, 64) == 1); @@ -139,26 +125,7 @@ void CPU::FlushICache(void* address, size_t length) { } -void CpuFeatures::Probe() { - // Compute I and D cache line size. The cache type register holds - // information about the caches. 
- uint32_t cache_type_register = GetCacheType(); - - static const int kDCacheLineSizeShift = 16; - static const int kICacheLineSizeShift = 0; - static const uint32_t kDCacheLineSizeMask = 0xf << kDCacheLineSizeShift; - static const uint32_t kICacheLineSizeMask = 0xf << kICacheLineSizeShift; - - // The cache type register holds the size of the I and D caches as a power of - // two. - uint32_t dcache_line_size_power_of_two = - (cache_type_register & kDCacheLineSizeMask) >> kDCacheLineSizeShift; - uint32_t icache_line_size_power_of_two = - (cache_type_register & kICacheLineSizeMask) >> kICacheLineSizeShift; - - dcache_line_size_ = 1 << dcache_line_size_power_of_two; - icache_line_size_ = 1 << icache_line_size_power_of_two; - +void CpuFeatures::Probe(bool serializer_enabled) { // AArch64 has no configuration options, no further probing is required. supported_ = 0; @@ -168,32 +135,6 @@ void CpuFeatures::Probe() { } -unsigned CpuFeatures::dcache_line_size() { - ASSERT(initialized_); - return dcache_line_size_; -} - - -unsigned CpuFeatures::icache_line_size() { - ASSERT(initialized_); - return icache_line_size_; -} - - -uint32_t CpuFeatures::GetCacheType() { -#ifdef USE_SIMULATOR - // This will lead to a cache with 1 byte long lines, which is fine since the - // simulator will not need this information. - return 0; -#else - uint32_t cache_type_register; - // Copy the content of the cache type register to a core register. - __asm__ __volatile__ ("mrs %[ctr], ctr_el0" // NOLINT - : [ctr] "=r" (cache_type_register)); - return cache_type_register; -#endif -} - } } // namespace v8::internal #endif // V8_TARGET_ARCH_ARM64 diff --git a/deps/v8/src/arm64/cpu-arm64.h b/deps/v8/src/arm64/cpu-arm64.h index ddec72d8f..0b7a7d7f1 100644 --- a/deps/v8/src/arm64/cpu-arm64.h +++ b/deps/v8/src/arm64/cpu-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
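
// A standalone illustrative sketch of the cache-line-size probing above
// (mirroring the new CacheLineSizes helper and the removed GetCacheType();
// names here are hypothetical): CTR_EL0 is read once, and each line size is
// decoded as a power of two from a 4-bit field, D-cache at bit 16 and
// I-cache at bit 0.
#include <cstdint>
namespace sketch {
inline uint64_t ReadCacheTypeRegister() {
#if defined(__aarch64__)
  uint64_t ctr;
  __asm__ __volatile__("mrs %0, ctr_el0" : "=r"(ctr));
  return ctr;
#else
  return 0;  // like the USE_SIMULATOR path: 1-byte lines, which is harmless
#endif
}

inline uint64_t CacheLineSize(uint64_t ctr, int shift) {
  return uint64_t{1} << ((ctr >> shift) & 0xf);  // same decoding as the patch
}
}  // namespace sketch
// Example: uint64_t dsize = sketch::CacheLineSize(sketch::ReadCacheTypeRegister(), 16);
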
#ifndef V8_ARM64_CPU_ARM64_H_ #define V8_ARM64_CPU_ARM64_H_ @@ -42,7 +19,7 @@ class CpuFeatures : public AllStatic { public: // Detect features of the target CPU. Set safe defaults if the serializer // is enabled (snapshots must be portable). - static void Probe(); + static void Probe(bool serializer_enabled); // Check whether a feature is supported by the target CPU. static bool IsSupported(CpuFeature f) { @@ -51,15 +28,9 @@ class CpuFeatures : public AllStatic { return false; }; - static bool IsFoundByRuntimeProbingOnly(CpuFeature f) { - ASSERT(initialized_); - // There are no optional features for ARM64. - return false; - } - - static bool IsSafeForSnapshot(CpuFeature f) { - return (IsSupported(f) && - (!Serializer::enabled() || !IsFoundByRuntimeProbingOnly(f))); + // There are no optional features for ARM64. + static bool IsSafeForSnapshot(Isolate* isolate, CpuFeature f) { + return IsSupported(f); } // I and D cache line size in bytes. @@ -81,21 +52,14 @@ class CpuFeatures : public AllStatic { return true; } - private: - // Return the content of the cache type register. - static uint32_t GetCacheType(); - - // I and D cache line size in bytes. - static unsigned icache_line_size_; - static unsigned dcache_line_size_; + static bool SupportsCrankshaft() { return true; } + private: #ifdef DEBUG static bool initialized_; #endif // This isn't used (and is always 0), but it is required by V8. - static unsigned found_by_runtime_probing_only_; - static unsigned cross_compile_; friend class PlatformFeatureScope; diff --git a/deps/v8/src/arm64/debug-arm64.cc b/deps/v8/src/arm64/debug-arm64.cc index 716337f05..6b1896782 100644 --- a/deps/v8/src/arm64/debug-arm64.cc +++ b/deps/v8/src/arm64/debug-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -38,8 +15,6 @@ namespace internal { #define __ ACCESS_MASM(masm) - -#ifdef ENABLE_DEBUGGER_SUPPORT bool BreakLocationIterator::IsDebugBreakAtReturn() { return Debug::IsDebugBreakAtReturn(rinfo()); } @@ -67,7 +42,7 @@ void BreakLocationIterator::SetDebugBreakAtReturn() { STATIC_ASSERT(Assembler::kJSRetSequenceInstructions >= 5); PatchingAssembler patcher(reinterpret_cast<Instruction*>(rinfo()->pc()), 5); byte* entry = - debug_info_->GetIsolate()->debug()->debug_break_return()->entry(); + debug_info_->GetIsolate()->builtins()->Return_DebugBreak()->entry(); // The first instruction of a patched return sequence must be a load literal // loading the address of the debug break return code. @@ -126,7 +101,7 @@ void BreakLocationIterator::SetDebugBreakAtSlot() { STATIC_ASSERT(Assembler::kDebugBreakSlotInstructions >= 4); PatchingAssembler patcher(reinterpret_cast<Instruction*>(rinfo()->pc()), 4); byte* entry = - debug_info_->GetIsolate()->debug()->debug_break_slot()->entry(); + debug_info_->GetIsolate()->builtins()->Slot_DebugBreak()->entry(); // The first instruction of a patched debug break slot must be a load literal // loading the address of the debug break slot code. @@ -204,7 +179,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, __ Mov(x0, 0); // No arguments. __ Mov(x1, ExternalReference::debug_break(masm->isolate())); - CEntryStub stub(1); + CEntryStub stub(masm->isolate(), 1); __ CallStub(&stub); // Restore the register values from the expression stack. @@ -240,6 +215,16 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, } +void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) { + // Register state for CallICStub + // ----------- S t a t e ------------- + // -- x1 : function + // -- x3 : slot in feedback array + // ----------------------------------- + Generate_DebugBreakCallHelper(masm, x1.Bit() | x3.Bit(), 0, x10); +} + + void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) { // Calling convention for IC load (from ic-arm.cc). // ----------- S t a t e ------------- @@ -296,15 +281,6 @@ void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) { } -void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) { - // Calling convention for IC call (from ic-arm.cc) - // ----------- S t a t e ------------- - // -- x2 : name - // ----------------------------------- - Generate_DebugBreakCallHelper(masm, x2.Bit(), 0, x10); -} - - void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) { // In places other than IC call sites it is expected that r0 is TOS which // is an object - this is not generally the case so this should be used with @@ -322,17 +298,6 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) { } -void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) { - // Register state for CallFunctionStub (from code-stubs-arm64.cc). - // ----------- S t a t e ------------- - // -- x1 : function - // -- x2 : feedback array - // -- x3 : slot in feedback array - // ----------------------------------- - Generate_DebugBreakCallHelper(masm, x1.Bit() | x2.Bit() | x3.Bit(), 0, x10); -} - - void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) { // Calling convention for CallConstructStub (from code-stubs-arm64.cc). 
// ----------- S t a t e ------------- @@ -386,8 +351,6 @@ void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) { const bool Debug::kFrameDropperSupported = false; -#endif // ENABLE_DEBUGGER_SUPPORT - } } // namespace v8::internal #endif // V8_TARGET_ARCH_ARM64 diff --git a/deps/v8/src/arm64/decoder-arm64-inl.h b/deps/v8/src/arm64/decoder-arm64-inl.h index 94009c704..eb791336d 100644 --- a/deps/v8/src/arm64/decoder-arm64-inl.h +++ b/deps/v8/src/arm64/decoder-arm64-inl.h @@ -1,29 +1,6 @@ // Copyright 2014 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_DECODER_ARM64_INL_H_ #define V8_ARM64_DECODER_ARM64_INL_H_ diff --git a/deps/v8/src/arm64/decoder-arm64.cc b/deps/v8/src/arm64/decoder-arm64.cc index a9829f0ab..13962387d 100644 --- a/deps/v8/src/arm64/decoder-arm64.cc +++ b/deps/v8/src/arm64/decoder-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/arm64/decoder-arm64.h b/deps/v8/src/arm64/decoder-arm64.h index e48f741bf..4409421bd 100644 --- a/deps/v8/src/arm64/decoder-arm64.h +++ b/deps/v8/src/arm64/decoder-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_DECODER_ARM64_H_ #define V8_ARM64_DECODER_ARM64_H_ diff --git a/deps/v8/src/arm64/deoptimizer-arm64.cc b/deps/v8/src/arm64/deoptimizer-arm64.cc index 93cb5176d..a19e2fc9f 100644 --- a/deps/v8/src/arm64/deoptimizer-arm64.cc +++ b/deps/v8/src/arm64/deoptimizer-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -138,6 +115,43 @@ Code* Deoptimizer::NotifyStubFailureBuiltin() { } +#define __ masm-> + +static void CopyRegisterDumpToFrame(MacroAssembler* masm, + Register frame, + CPURegList reg_list, + Register scratch1, + Register scratch2, + int src_offset, + int dst_offset) { + int offset0, offset1; + CPURegList copy_to_input = reg_list; + int reg_count = reg_list.Count(); + int reg_size = reg_list.RegisterSizeInBytes(); + for (int i = 0; i < (reg_count / 2); i++) { + __ PeekPair(scratch1, scratch2, src_offset + (i * reg_size * 2)); + + offset0 = (copy_to_input.PopLowestIndex().code() * reg_size) + dst_offset; + offset1 = (copy_to_input.PopLowestIndex().code() * reg_size) + dst_offset; + + if ((offset0 + reg_size) == offset1) { + // Registers are adjacent: store in pairs. + __ Stp(scratch1, scratch2, MemOperand(frame, offset0)); + } else { + // Registers are not adjacent: store individually. + __ Str(scratch1, MemOperand(frame, offset0)); + __ Str(scratch2, MemOperand(frame, offset1)); + } + } + if ((reg_count & 1) != 0) { + __ Peek(scratch1, src_offset + (reg_count - 1) * reg_size); + offset0 = (copy_to_input.PopLowestIndex().code() * reg_size) + dst_offset; + __ Str(scratch1, MemOperand(frame, offset0)); + } +} + +#undef __ + #define __ masm()-> void Deoptimizer::EntryGenerator::Generate() { @@ -200,25 +214,13 @@ void Deoptimizer::EntryGenerator::Generate() { __ Ldr(x1, MemOperand(deoptimizer, Deoptimizer::input_offset())); // Copy core registers into the input frame. - CPURegList copy_to_input = saved_registers; - for (int i = 0; i < saved_registers.Count(); i++) { - // TODO(all): Look for opportunities to optimize this by using ldp/stp. - __ Peek(x2, i * kPointerSize); - CPURegister current_reg = copy_to_input.PopLowestIndex(); - int offset = (current_reg.code() * kPointerSize) + - FrameDescription::registers_offset(); - __ Str(x2, MemOperand(x1, offset)); - } + CopyRegisterDumpToFrame(masm(), x1, saved_registers, x2, x4, 0, + FrameDescription::registers_offset()); // Copy FP registers to the input frame. - for (int i = 0; i < saved_fp_registers.Count(); i++) { - // TODO(all): Look for opportunities to optimize this by using ldp/stp. 
- int dst_offset = FrameDescription::double_registers_offset() + - (i * kDoubleSize); - int src_offset = kFPRegistersOffset + (i * kDoubleSize); - __ Peek(x2, src_offset); - __ Str(x2, MemOperand(x1, dst_offset)); - } + CopyRegisterDumpToFrame(masm(), x1, saved_fp_registers, x2, x4, + kFPRegistersOffset, + FrameDescription::double_registers_offset()); // Remove the bailout id and the saved registers from the stack. __ Drop(1 + (kSavedRegistersAreaSize / kXRegSize)); diff --git a/deps/v8/src/arm64/disasm-arm64.cc b/deps/v8/src/arm64/disasm-arm64.cc index ed3e92879..e9e1decad 100644 --- a/deps/v8/src/arm64/disasm-arm64.cc +++ b/deps/v8/src/arm64/disasm-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <assert.h> #include <stdio.h> @@ -1632,10 +1609,9 @@ int Disassembler::SubstituteBranchTargetField(Instruction* instr, offset <<= kInstructionSizeLog2; char sign = '+'; if (offset < 0) { - offset = -offset; sign = '-'; } - AppendToOutput("#%c0x%" PRIx64 " (addr %p)", sign, offset, + AppendToOutput("#%c0x%" PRIx64 " (addr %p)", sign, Abs(offset), instr->InstructionAtOffset(offset), Instruction::NO_CHECK); return 8; } diff --git a/deps/v8/src/arm64/disasm-arm64.h b/deps/v8/src/arm64/disasm-arm64.h index 8c964a890..42552a2d8 100644 --- a/deps/v8/src/arm64/disasm-arm64.h +++ b/deps/v8/src/arm64/disasm-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_DISASM_ARM64_H #define V8_ARM64_DISASM_ARM64_H diff --git a/deps/v8/src/arm64/frames-arm64.cc b/deps/v8/src/arm64/frames-arm64.cc index 8c1bc20ac..da638ad6e 100644 --- a/deps/v8/src/arm64/frames-arm64.cc +++ b/deps/v8/src/arm64/frames-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/arm64/frames-arm64.h b/deps/v8/src/arm64/frames-arm64.h index 8b5641058..3996bd75d 100644 --- a/deps/v8/src/arm64/frames-arm64.h +++ b/deps/v8/src/arm64/frames-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. 
All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "arm64/constants-arm64.h" #include "arm64/assembler-arm64.h" diff --git a/deps/v8/src/arm64/full-codegen-arm64.cc b/deps/v8/src/arm64/full-codegen-arm64.cc index d40e74aa2..0196e69e4 100644 --- a/deps/v8/src/arm64/full-codegen-arm64.cc +++ b/deps/v8/src/arm64/full-codegen-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -117,10 +94,14 @@ static void EmitStackCheck(MacroAssembler* masm_, Label ok; ASSERT(jssp.Is(__ StackPointer())); ASSERT(scratch.Is(jssp) == (pointers == 0)); + Heap::RootListIndex index; if (pointers != 0) { __ Sub(scratch, jssp, pointers * kPointerSize); + index = Heap::kRealStackLimitRootIndex; + } else { + index = Heap::kStackLimitRootIndex; } - __ CompareRoot(scratch, Heap::kStackLimitRootIndex); + __ CompareRoot(scratch, index); __ B(hs, &ok); PredictableCodeSizeScope predictable(masm_, Assembler::kCallSizeWithRelocation); @@ -148,8 +129,6 @@ void FullCodeGenerator::Generate() { handler_table_ = isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); - InitializeFeedbackVector(); - profiling_counter_ = isolate()->factory()->NewCell( Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); SetFunctionPosition(function()); @@ -237,7 +216,7 @@ void FullCodeGenerator::Generate() { __ Push(x1, x10); __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2); } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); + FastNewContextStub stub(isolate(), heap_slots); __ CallStub(&stub); } else { __ Push(x1); @@ -297,7 +276,7 @@ void FullCodeGenerator::Generate() { } else { type = ArgumentsAccessStub::NEW_SLOPPY_FAST; } - ArgumentsAccessStub stub(type); + ArgumentsAccessStub stub(isolate(), type); __ CallStub(&stub); SetVar(arguments, x0, x1, x2); @@ -387,7 +366,12 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, Label ok; ASSERT(back_edge_target->is_bound()); - int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); + // We want to do a round rather than a floor of distance/kCodeSizeMultiplier + // to reduce the absolute error due to the integer division. To do that, + // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to + // the result). + int distance = + masm_->SizeOfCodeGeneratedSince(back_edge_target) + kCodeSizeMultiplier / 2; int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); EmitProfilingCounterDecrement(weight); @@ -430,7 +414,7 @@ void FullCodeGenerator::EmitReturnSequence() { if (info_->ShouldSelfOptimize()) { weight = FLAG_interrupt_budget / FLAG_self_opt_count; } else { - int distance = masm_->pc_offset(); + int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2; weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); } @@ -1195,12 +1179,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { // We got a fixed array in register x0. Iterate through that. __ Bind(&fixed_array); - Handle<Object> feedback = Handle<Object>( - Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), - isolate()); - StoreFeedbackVectorSlot(slot, feedback); __ LoadObject(x1, FeedbackVector()); - __ Mov(x10, Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)); + __ Mov(x10, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate()))); __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot))); __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check. 
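The back-edge bookkeeping hunk above changes the weight computation from a plain floor of distance/kCodeSizeMultiplier to a round-to-nearest, by adding kCodeSizeMultiplier/2 to the distance before the integer division, as the new comment in the hunk explains. A minimal standalone sketch of that arithmetic, assuming an arbitrary multiplier value chosen only for the demonstration (the real constant is per-architecture and not shown here):

#include <cstdio>
#include <initializer_list>

int main() {
  const int kCodeSizeMultiplier = 150;  // demo value only, not V8's constant
  for (int distance : {40, 80, 150, 220}) {
    int floored = distance / kCodeSizeMultiplier;
    int rounded = (distance + kCodeSizeMultiplier / 2) / kCodeSizeMultiplier;
    std::printf("distance=%d floor=%d round=%d\n", distance, floored, rounded);
  }
  return 0;
}

With the demo value, a distance of 80 floors to 0 but rounds to 1, which is the halved worst-case error the hunk's comment is after.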
@@ -1359,7 +1339,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, !pretenure && scope()->is_function_scope() && info->num_literals() == 0) { - FastNewClosureStub stub(info->strict_mode(), info->is_generator()); + FastNewClosureStub stub(isolate(), + info->strict_mode(), + info->is_generator()); __ Mov(x2, Operand(info)); __ CallStub(&stub); } else { @@ -1672,13 +1654,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { int properties_count = constant_properties->length() / 2; const int max_cloned_properties = FastCloneShallowObjectStub::kMaximumClonedProperties; - if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() || - flags != ObjectLiteral::kFastElements || + if (expr->may_store_doubles() || expr->depth() > 1 || + Serializer::enabled(isolate()) || flags != ObjectLiteral::kFastElements || properties_count > max_cloned_properties) { __ Push(x3, x2, x1, x0); __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4); } else { - FastCloneShallowObjectStub stub(properties_count); + FastCloneShallowObjectStub stub(isolate(), properties_count); __ CallStub(&stub); } @@ -1816,13 +1798,14 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { if (has_fast_elements && constant_elements_values->map() == isolate()->heap()->fixed_cow_array_map()) { FastCloneShallowArrayStub stub( + isolate(), FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, allocation_site_mode, length); __ CallStub(&stub); __ IncrementCounter( isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11); - } else if ((expr->depth() > 1) || Serializer::enabled() || + } else if ((expr->depth() > 1) || Serializer::enabled(isolate()) || length > FastCloneShallowArrayStub::kMaximumClonedLength) { __ Mov(x0, Smi::FromInt(flags)); __ Push(x3, x2, x1, x0); @@ -1837,7 +1820,10 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; } - FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); + FastCloneShallowArrayStub stub(isolate(), + mode, + allocation_site_mode, + length); __ CallStub(&stub); } @@ -1869,7 +1855,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); } else { __ Mov(x3, Smi::FromInt(i)); - StoreArrayLiteralElementStub stub; + StoreArrayLiteralElementStub stub(isolate()); __ CallStub(&stub); } @@ -1886,7 +1872,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { void FullCodeGenerator::VisitAssignment(Assignment* expr) { - ASSERT(expr->target()->IsValidLeftHandSide()); + ASSERT(expr->target()->IsValidReferenceExpression()); Comment cmnt(masm_, "[ Assignment"); @@ -2030,10 +2016,10 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, patch_site.EmitJumpIfSmi(x10, &both_smis); __ Bind(&stub_call); - BinaryOpICStub stub(op, mode); + BinaryOpICStub stub(isolate(), op, mode); { Assembler::BlockPoolsScope scope(masm_); - CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); + CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); patch_site.EmitPatchInfo(); } __ B(&done); @@ -2115,11 +2101,11 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op, OverwriteMode mode) { __ Pop(x1); - BinaryOpICStub stub(op, mode); + BinaryOpICStub stub(isolate(), op, mode); JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code. 
{ Assembler::BlockPoolsScope scope(masm_); - CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); + CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); patch_site.EmitPatchInfo(); } context()->Plug(x0); @@ -2127,7 +2113,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, void FullCodeGenerator::EmitAssignment(Expression* expr) { - ASSERT(expr->IsValidLeftHandSide()); + ASSERT(expr->IsValidReferenceExpression()); // Left-hand side can only be a property, a global or a (parameter or local) // slot. @@ -2333,16 +2319,15 @@ void FullCodeGenerator::CallIC(Handle<Code> code, // Code common for calls using the IC. -void FullCodeGenerator::EmitCallWithIC(Call* expr) { - ASM_LOCATION("EmitCallWithIC"); - +void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { Expression* callee = expr->expression(); - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); - CallFunctionFlags flags; + CallIC::CallType call_type = callee->IsVariableProxy() + ? CallIC::FUNCTION + : CallIC::METHOD; + // Get the target function. - if (callee->IsVariableProxy()) { + if (call_type == CallIC::FUNCTION) { { StackValueContext context(this); EmitVariableLoad(callee->AsVariableProxy()); PrepareForBailout(callee, NO_REGISTERS); @@ -2350,7 +2335,6 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) { // Push undefined as receiver. This is patched in the method prologue if it // is a sloppy mode method. __ Push(isolate()->factory()->undefined_value()); - flags = NO_CALL_FUNCTION_FLAGS; } else { // Load the function from the receiver. ASSERT(callee->IsProperty()); @@ -2360,40 +2344,19 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) { // Push the target function under the receiver. __ Pop(x10); __ Push(x0, x10); - flags = CALL_AS_METHOD; } - // Load the arguments. - { PreservePositionScope scope(masm()->positions_recorder()); - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } - } - - // Record source position for debugger. - SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, flags); - __ Peek(x1, (arg_count + 1) * kPointerSize); - __ CallStub(&stub); - - RecordJSReturnSite(expr); - - // Restore context register. - __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, x0); + EmitCall(expr, call_type); } // Code common for calls using the IC. -void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, - Expression* key) { +void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, + Expression* key) { // Load the key. VisitForAccumulatorValue(key); Expression* callee = expr->expression(); - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); // Load the function from the receiver. ASSERT(callee->IsProperty()); @@ -2405,28 +2368,12 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, __ Pop(x10); __ Push(x0, x10); - { PreservePositionScope scope(masm()->positions_recorder()); - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } - } - - // Record source position for debugger. - SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, CALL_AS_METHOD); - __ Peek(x1, (arg_count + 1) * kPointerSize); - __ CallStub(&stub); - - RecordJSReturnSite(expr); - // Restore context register. 
- __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, x0); + EmitCall(expr, CallIC::METHOD); } -void FullCodeGenerator::EmitCallWithStub(Call* expr) { - // Code common for calls using the call stub. +void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) { + // Load the arguments. ZoneList<Expression*>* args = expr->arguments(); int arg_count = args->length(); { PreservePositionScope scope(masm()->positions_recorder()); @@ -2434,19 +2381,17 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { VisitForStackValue(args->at(i)); } } - // Record source position for debugger. + // Record source position of the IC call. SetSourcePosition(expr->position()); - Handle<Object> uninitialized = - TypeFeedbackInfo::UninitializedSentinel(isolate()); - StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); - __ LoadObject(x2, FeedbackVector()); + Handle<Code> ic = CallIC::initialize_stub( + isolate(), arg_count, call_type); __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot())); - - // Record call targets in unoptimized code. - CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); __ Peek(x1, (arg_count + 1) * kXRegSize); - __ CallStub(&stub); + // Don't assign a type feedback id to the IC, since type feedback is provided + // by the vector above. + CallIC(ic); + RecordJSReturnSite(expr); // Restore context register. __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); @@ -2529,7 +2474,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { SetSourcePosition(expr->position()); // Call the evaluated function. - CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); + CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); __ Peek(x1, (arg_count + 1) * kXRegSize); __ CallStub(&stub); RecordJSReturnSite(expr); @@ -2538,7 +2483,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { context()->DropAndPlug(1, x0); } else if (call_type == Call::GLOBAL_CALL) { - EmitCallWithIC(expr); + EmitCallWithLoadIC(expr); } else if (call_type == Call::LOOKUP_SLOT_CALL) { // Call to a lookup slot (dynamically introduced variable). @@ -2578,16 +2523,16 @@ void FullCodeGenerator::VisitCall(Call* expr) { // The receiver is either the global receiver or an object found // by LoadContextSlot. - EmitCallWithStub(expr); + EmitCall(expr); } else if (call_type == Call::PROPERTY_CALL) { Property* property = callee->AsProperty(); { PreservePositionScope scope(masm()->positions_recorder()); VisitForStackValue(property->obj()); } if (property->key()->IsPropertyName()) { - EmitCallWithIC(expr); + EmitCallWithLoadIC(expr); } else { - EmitKeyedCallWithIC(expr, property->key()); + EmitKeyedCallWithLoadIC(expr, property->key()); } } else { @@ -2599,7 +2544,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { __ LoadRoot(x1, Heap::kUndefinedValueRootIndex); __ Push(x1); // Emit function call. - EmitCallWithStub(expr); + EmitCall(expr); } #ifdef DEBUG @@ -2636,12 +2581,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { __ Peek(x1, arg_count * kXRegSize); // Record call targets in unoptimized code. 
- Handle<Object> uninitialized = - TypeFeedbackInfo::UninitializedSentinel(isolate()); - StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); if (FLAG_pretenuring_call_new) { - StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), - isolate()->factory()->NewAllocationSite()); + EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); ASSERT(expr->AllocationSiteFeedbackSlot() == expr->CallNewFeedbackSlot() + 1); } @@ -2649,8 +2590,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { __ LoadObject(x2, FeedbackVector()); __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot())); - CallConstructStub stub(RECORD_CALL_TARGET); - __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); + CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); + __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); PrepareForBailoutForId(expr->ReturnId(), TOS_REG); context()->Plug(x0); } @@ -3033,7 +2974,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) { VisitForAccumulatorValue(args->at(0)); __ Mov(x1, x0); __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters())); - ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); + ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); __ CallStub(&stub); context()->Plug(x0); } @@ -3124,31 +3065,9 @@ void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { } -void FullCodeGenerator::EmitLog(CallRuntime* expr) { - // Conditionally generate a log call. - // Args: - // 0 (literal string): The type of logging (corresponds to the flags). - // This is used to determine whether or not to generate the log call. - // 1 (string): Format string. Access the string at argument index 2 - // with '%2s' (see Logger::LogRuntime for all the formats). - // 2 (array): Arguments to the format string. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT_EQ(args->length(), 3); - if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) { - VisitForStackValue(args->at(1)); - VisitForStackValue(args->at(2)); - __ CallRuntime(Runtime::kHiddenLog, 2); - } - - // Finally, we're expected to leave a value on the top of the stack. - __ LoadRoot(x0, Heap::kUndefinedValueRootIndex); - context()->Plug(x0); -} - - void FullCodeGenerator::EmitSubString(CallRuntime* expr) { // Load the arguments on the stack and call the stub. - SubStringStub stub; + SubStringStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 3); VisitForStackValue(args->at(0)); @@ -3161,7 +3080,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) { void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { // Load the arguments on the stack and call the stub. - RegExpExecStub stub; + RegExpExecStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 4); VisitForStackValue(args->at(0)); @@ -3303,7 +3222,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { ASSERT(args->length() == 2); VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - MathPowStub stub(MathPowStub::ON_STACK); + MathPowStub stub(isolate(), MathPowStub::ON_STACK); __ CallStub(&stub); context()->Plug(x0); } @@ -3345,7 +3264,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { // Load the argument into x0 and call the stub. 
VisitForAccumulatorValue(args->at(0)); - NumberToStringStub stub; + NumberToStringStub stub(isolate()); __ CallStub(&stub); context()->Plug(x0); } @@ -3473,7 +3392,7 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { VisitForAccumulatorValue(args->at(1)); __ Pop(x1); - StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED); + StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); __ CallStub(&stub); context()->Plug(x0); @@ -3486,32 +3405,12 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - StringCompareStub stub; + StringCompareStub stub(isolate()); __ CallStub(&stub); context()->Plug(x0); } -void FullCodeGenerator::EmitMathLog(CallRuntime* expr) { - // Load the argument on the stack and call the runtime function. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT(args->length() == 1); - VisitForStackValue(args->at(0)); - __ CallRuntime(Runtime::kMath_log, 1); - context()->Plug(x0); -} - - -void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) { - // Load the argument on the stack and call the runtime function. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT(args->length() == 1); - VisitForStackValue(args->at(0)); - __ CallRuntime(Runtime::kMath_sqrt, 1); - context()->Plug(x0); -} - - void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { ASM_LOCATION("FullCodeGenerator::EmitCallFunction"); ZoneList<Expression*>* args = expr->arguments(); @@ -3545,7 +3444,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { - RegExpConstructResultStub stub; + RegExpConstructResultStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 3); VisitForStackValue(args->at(0)); @@ -3889,7 +3788,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { // Record source position of the IC call. SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); + CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); __ Peek(x1, (arg_count + 1) * kPointerSize); __ CallStub(&stub); @@ -4021,7 +3920,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { - ASSERT(expr->expression()->IsValidLeftHandSide()); + ASSERT(expr->expression()->IsValidReferenceExpression()); Comment cmnt(masm_, "[ CountOperation"); SetSourcePosition(expr->position()); @@ -4107,7 +4006,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { __ B(&stub_call); __ Bind(&slow); } - ToNumberStub convert_stub; + ToNumberStub convert_stub(isolate()); __ CallStub(&convert_stub); // Save result for postfix expressions. 
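A recurring pattern in these full-codegen hunks is that every code stub (ToNumberStub, NumberToStringStub, StringAddStub, and the rest) now receives the Isolate in its constructor, and GetCode() is called with no arguments. A hedged, self-contained sketch of that dependency-at-construction shape, using hypothetical stand-in types rather than V8's own classes:

#include <cstdio>

struct Isolate {};  // stand-in for the real isolate type, for illustration only

class ToNumberStubLike {  // hypothetical name; mirrors only the constructor change
 public:
  explicit ToNumberStubLike(Isolate* isolate) : isolate_(isolate) {}
  const char* GetCode() const {          // no Isolate* parameter any more
    return isolate_ != nullptr ? "stub code" : "no code";
  }
 private:
  Isolate* isolate_;
};

int main() {
  Isolate isolate;
  ToNumberStubLike stub(&isolate);  // isolate supplied once, at construction
  std::printf("%s\n", stub.GetCode());
  return 0;
}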
@@ -4139,8 +4038,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { { Assembler::BlockPoolsScope scope(masm_); - BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); - CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); + BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE); + CallIC(stub.GetCode(), expr->CountBinOpFeedbackId()); patch_site.EmitPatchInfo(); } __ Bind(&done); @@ -4254,13 +4153,14 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, } PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); - if (check->Equals(isolate()->heap()->number_string())) { + Factory* factory = isolate()->factory(); + if (String::Equals(check, factory->number_string())) { ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string"); __ JumpIfSmi(x0, if_true); __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset)); __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex); Split(eq, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->string_string())) { + } else if (String::Equals(check, factory->string_string())) { ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string"); __ JumpIfSmi(x0, if_false); // Check for undetectable objects => false. @@ -4268,22 +4168,22 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset)); __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->symbol_string())) { + } else if (String::Equals(check, factory->symbol_string())) { ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string"); __ JumpIfSmi(x0, if_false); __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE); Split(eq, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->boolean_string())) { + } else if (String::Equals(check, factory->boolean_string())) { ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string"); __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true); __ CompareRoot(x0, Heap::kFalseValueRootIndex); Split(eq, if_true, if_false, fall_through); } else if (FLAG_harmony_typeof && - check->Equals(isolate()->heap()->null_string())) { + String::Equals(check, factory->null_string())) { ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof null_string"); __ CompareRoot(x0, Heap::kNullValueRootIndex); Split(eq, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->undefined_string())) { + } else if (String::Equals(check, factory->undefined_string())) { ASM_LOCATION( "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string"); __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true); @@ -4293,7 +4193,7 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset)); __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true, fall_through); - } else if (check->Equals(isolate()->heap()->function_string())) { + } else if (String::Equals(check, factory->function_string())) { ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string"); __ JumpIfSmi(x0, if_false); STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); @@ -4301,7 +4201,7 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->object_string())) { + } else if (String::Equals(check, 
factory->object_string())) { ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string"); __ JumpIfSmi(x0, if_false); if (!FLAG_harmony_typeof) { @@ -4360,7 +4260,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { case Token::INSTANCEOF: { VisitForStackValue(expr->right()); - InstanceofStub stub(InstanceofStub::kNoFlags); + InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); __ CallStub(&stub); PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); // The stub returns 0 for true. @@ -4568,7 +4468,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) { CallIC(ic, TypeFeedbackId::None()); __ Mov(x1, x0); __ Poke(x1, 2 * kPointerSize); - CallFunctionStub stub(1, CALL_AS_METHOD); + CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); __ CallStub(&stub); __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); @@ -4721,7 +4621,7 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) { Label gc_required; Label allocated; - Handle<Map> map(isolate()->native_context()->generator_result_map()); + Handle<Map> map(isolate()->native_context()->iterator_result_map()); // Allocate and populate an object with this form: { value: VAL, done: DONE } @@ -4740,22 +4640,23 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) { Register result_value = x2; Register boolean_done = x3; Register empty_fixed_array = x4; + Register untagged_result = x5; __ Mov(map_reg, Operand(map)); __ Pop(result_value); __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done))); __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array())); ASSERT_EQ(map->instance_size(), 5 * kPointerSize); - // TODO(jbramley): Use Stp if possible. - __ Str(map_reg, FieldMemOperand(result, HeapObject::kMapOffset)); - __ Str(empty_fixed_array, - FieldMemOperand(result, JSObject::kPropertiesOffset)); - __ Str(empty_fixed_array, FieldMemOperand(result, JSObject::kElementsOffset)); - __ Str(result_value, - FieldMemOperand(result, - JSGeneratorObject::kResultValuePropertyOffset)); - __ Str(boolean_done, - FieldMemOperand(result, - JSGeneratorObject::kResultDonePropertyOffset)); + STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize == + JSObject::kElementsOffset); + STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize == + JSGeneratorObject::kResultDonePropertyOffset); + __ ObjectUntag(untagged_result, result); + __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset)); + __ Stp(empty_fixed_array, empty_fixed_array, + MemOperand(untagged_result, JSObject::kPropertiesOffset)); + __ Stp(result_value, boolean_done, + MemOperand(untagged_result, + JSGeneratorObject::kResultValuePropertyOffset)); // Only the value field needs a write barrier, as the other values are in the // root set. 
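The iterator-result hunk above replaces individual Str instructions with Stp store pairs after untagging the object pointer, which is only safe because the STATIC_ASSERTs pin the field offsets to be exactly one pointer apart. A minimal sketch of that adjacency requirement, assuming a plain byte buffer and hypothetical offsets in place of a real heap object:

#include <cassert>
#include <cstdint>
#include <cstring>

// Models a single store-pair: writes two 64-bit values to adjacent slots.
static void StorePair(uint8_t* base, int offset0, int offset1,
                      uint64_t v0, uint64_t v1) {
  assert(offset1 == offset0 + static_cast<int>(sizeof(uint64_t)));
  uint64_t pair[2] = {v0, v1};
  std::memcpy(base + offset0, pair, sizeof(pair));
}

int main() {
  uint8_t object[5 * sizeof(uint64_t)] = {};
  // Hypothetical offsets; the real ones come from JSObject/JSGeneratorObject.
  const int kPropertiesOffset = 1 * sizeof(uint64_t);
  const int kElementsOffset = 2 * sizeof(uint64_t);
  StorePair(object, kPropertiesOffset, kElementsOffset, 0u, 0u);
  return 0;
}

If the two offsets ever drifted apart, the assert (and in the real code the STATIC_ASSERT) would fail rather than silently storing the second value at the wrong slot.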
@@ -4835,8 +4736,9 @@ void FullCodeGenerator::EnterFinallyBlock() { ExternalReference has_pending_message = ExternalReference::address_of_has_pending_message(isolate()); + STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof) __ Mov(x11, has_pending_message); - __ Ldr(x11, MemOperand(x11)); + __ Ldrb(x11, MemOperand(x11)); __ SmiTag(x11); __ Push(x10, x11); @@ -4864,7 +4766,8 @@ void FullCodeGenerator::ExitFinallyBlock() { ExternalReference has_pending_message = ExternalReference::address_of_has_pending_message(isolate()); __ Mov(x13, has_pending_message); - __ Str(x11, MemOperand(x13)); + STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof) + __ Strb(x11, MemOperand(x13)); ExternalReference pending_message_obj = ExternalReference::address_of_pending_message_obj(isolate()); diff --git a/deps/v8/src/arm64/ic-arm64.cc b/deps/v8/src/arm64/ic-arm64.cc index 5fb7d633f..c09b847ba 100644 --- a/deps/v8/src/arm64/ic-arm64.cc +++ b/deps/v8/src/arm64/ic-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -1044,7 +1021,6 @@ static void KeyedStoreGenerateGenericHelper( elements, x10, d0, - d1, &transition_double_elements); if (increment_length == kIncrementLength) { // Add 1 to receiver->length. diff --git a/deps/v8/src/arm64/instructions-arm64.cc b/deps/v8/src/arm64/instructions-arm64.cc index 4d1428a15..2996fc94c 100644 --- a/deps/v8/src/arm64/instructions-arm64.cc +++ b/deps/v8/src/arm64/instructions-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -254,11 +231,18 @@ void Instruction::SetImmPCOffsetTarget(Instruction* target) { void Instruction::SetPCRelImmTarget(Instruction* target) { // ADRP is not supported, so 'this' must point to an ADR instruction. - ASSERT(Mask(PCRelAddressingMask) == ADR); + ASSERT(IsAdr()); - Instr imm = Assembler::ImmPCRelAddress(DistanceTo(target)); - - SetInstructionBits(Mask(~ImmPCRel_mask) | imm); + int target_offset = DistanceTo(target); + Instr imm; + if (Instruction::IsValidPCRelOffset(target_offset)) { + imm = Assembler::ImmPCRelAddress(target_offset); + SetInstructionBits(Mask(~ImmPCRel_mask) | imm); + } else { + PatchingAssembler patcher(this, + PatchingAssembler::kAdrFarPatchableNInstrs); + patcher.PatchAdrFar(target); + } } diff --git a/deps/v8/src/arm64/instructions-arm64.h b/deps/v8/src/arm64/instructions-arm64.h index ab64cb2bf..968ddace0 100644 --- a/deps/v8/src/arm64/instructions-arm64.h +++ b/deps/v8/src/arm64/instructions-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_INSTRUCTIONS_ARM64_H_ #define V8_ARM64_INSTRUCTIONS_ARM64_H_ @@ -160,9 +137,10 @@ class Instruction { // ImmPCRel is a compound field (not present in INSTRUCTION_FIELDS_LIST), // formed from ImmPCRelLo and ImmPCRelHi. int ImmPCRel() const { + ASSERT(IsPCRelAddressing()); int const offset = ((ImmPCRelHi() << ImmPCRelLo_width) | ImmPCRelLo()); int const width = ImmPCRelLo_width + ImmPCRelHi_width; - return signed_bitextract_32(width-1, 0, offset); + return signed_bitextract_32(width - 1, 0, offset); } uint64_t ImmLogical(); @@ -191,6 +169,10 @@ class Instruction { return Mask(TestBranchFMask) == TestBranchFixed; } + bool IsImmBranch() const { + return BranchType() != UnknownBranchType; + } + bool IsLdrLiteral() const { return Mask(LoadLiteralFMask) == LoadLiteralFixed; } @@ -203,6 +185,10 @@ class Instruction { return Mask(PCRelAddressingFMask) == PCRelAddressingFixed; } + bool IsAdr() const { + return Mask(PCRelAddressingMask) == ADR; + } + bool IsLogicalImmediate() const { return Mask(LogicalImmediateFMask) == LogicalImmediateFixed; } @@ -211,6 +197,10 @@ class Instruction { return Mask(AddSubImmediateFMask) == AddSubImmediateFixed; } + bool IsAddSubShifted() const { + return Mask(AddSubShiftedFMask) == AddSubShiftedFixed; + } + bool IsAddSubExtended() const { return Mask(AddSubExtendedFMask) == AddSubExtendedFixed; } @@ -387,6 +377,10 @@ class Instruction { } + static const int ImmPCRelRangeBitwidth = 21; + static bool IsValidPCRelOffset(int offset) { + return is_int21(offset); + } void SetPCRelImmTarget(Instruction* target); void SetBranchImmTarget(Instruction* target); }; diff --git a/deps/v8/src/arm64/instrument-arm64.cc b/deps/v8/src/arm64/instrument-arm64.cc index 6744707fd..a6fe1234b 100644 --- a/deps/v8/src/arm64/instrument-arm64.cc +++ b/deps/v8/src/arm64/instrument-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "arm64/instrument-arm64.h" diff --git a/deps/v8/src/arm64/instrument-arm64.h b/deps/v8/src/arm64/instrument-arm64.h index 996cc07ac..2d41b5857 100644 --- a/deps/v8/src/arm64/instrument-arm64.h +++ b/deps/v8/src/arm64/instrument-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_INSTRUMENT_ARM64_H_ #define V8_ARM64_INSTRUMENT_ARM64_H_ diff --git a/deps/v8/src/arm64/lithium-arm64.cc b/deps/v8/src/arm64/lithium-arm64.cc index 60bf51ebb..2411b7074 100644 --- a/deps/v8/src/arm64/lithium-arm64.cc +++ b/deps/v8/src/arm64/lithium-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -515,6 +492,8 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, !hinstr->HasObservableSideEffects(); if (needs_environment && !instr->HasEnvironment()) { instr = AssignEnvironment(instr); + // We can't really figure out if the environment is needed or not. + instr->environment()->set_has_been_used(); } return instr; @@ -541,6 +520,19 @@ LUnallocated* LChunkBuilder::TempRegister() { } +LUnallocated* LChunkBuilder::TempDoubleRegister() { + LUnallocated* operand = + new(zone()) LUnallocated(LUnallocated::MUST_HAVE_DOUBLE_REGISTER); + int vreg = allocator_->GetVirtualRegister(); + if (!allocator_->AllocationOk()) { + Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister); + vreg = 0; + } + operand->set_virtual_register(vreg); + return operand; +} + + int LPlatformChunk::GetNextSpillIndex() { return spill_slot_count_++; } @@ -702,7 +694,8 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) { // the it was just a plain use), so it is free to move the split child into // the same register that is used for the use-at-start. 
// See https://code.google.com/p/chromium/issues/detail?id=201590 - if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) { + if (!(instr->ClobbersRegisters() && + instr->ClobbersDoubleRegisters(isolate()))) { int fixed = 0; int used_at_start = 0; for (UseIterator it(instr); !it.Done(); it.Advance()) { @@ -846,6 +839,12 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) { if (instr->representation().IsSmiOrInteger32()) { ASSERT(instr->left()->representation().Equals(instr->representation())); ASSERT(instr->right()->representation().Equals(instr->representation())); + + LInstruction* shifted_operation = TryDoOpWithShiftedRightOperand(instr); + if (shifted_operation != NULL) { + return shifted_operation; + } + LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand()); LOperand* right = UseRegisterOrConstantAtStart(instr->BetterRightOperand()); @@ -926,6 +925,11 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) { ASSERT(instr->right()->representation().Equals(instr->representation())); ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32)); + LInstruction* shifted_operation = TryDoOpWithShiftedRightOperand(instr); + if (shifted_operation != NULL) { + return shifted_operation; + } + LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand()); LOperand* right = UseRegisterOrConstantAtStart(instr->BetterRightOperand()); @@ -947,9 +951,16 @@ LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) { LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) { - LOperand* value = UseRegisterOrConstantAtStart(instr->index()); - LOperand* length = UseRegister(instr->length()); - return AssignEnvironment(new(zone()) LBoundsCheck(value, length)); + if (!FLAG_debug_code && instr->skip_check()) return NULL; + LOperand* index = UseRegisterOrConstantAtStart(instr->index()); + LOperand* length = !index->IsConstantOperand() + ? 
UseRegisterOrConstantAtStart(instr->length()) + : UseRegisterAtStart(instr->length()); + LInstruction* result = new(zone()) LBoundsCheck(index, length); + if (!FLAG_debug_code || !instr->skip_check()) { + result = AssignEnvironment(result); + } + return result; } @@ -1074,63 +1085,59 @@ LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) { LInstruction* LChunkBuilder::DoChange(HChange* instr) { Representation from = instr->from(); Representation to = instr->to(); - + HValue* val = instr->value(); if (from.IsSmi()) { if (to.IsTagged()) { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); return DefineSameAsFirst(new(zone()) LDummyUse(value)); } from = Representation::Tagged(); } - if (from.IsTagged()) { if (to.IsDouble()) { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); LOperand* temp = TempRegister(); - LNumberUntagD* res = new(zone()) LNumberUntagD(value, temp); - return AssignEnvironment(DefineAsRegister(res)); + LInstruction* result = + DefineAsRegister(new(zone()) LNumberUntagD(value, temp)); + if (!val->representation().IsSmi()) result = AssignEnvironment(result); + return result; } else if (to.IsSmi()) { - LOperand* value = UseRegister(instr->value()); - if (instr->value()->type().IsSmi()) { + LOperand* value = UseRegister(val); + if (val->type().IsSmi()) { return DefineSameAsFirst(new(zone()) LDummyUse(value)); } return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value))); } else { ASSERT(to.IsInteger32()); - LInstruction* res = NULL; - - if (instr->value()->type().IsSmi() || - instr->value()->representation().IsSmi()) { - LOperand* value = UseRegisterAtStart(instr->value()); - res = DefineAsRegister(new(zone()) LSmiUntag(value, false)); + if (val->type().IsSmi() || val->representation().IsSmi()) { + LOperand* value = UseRegisterAtStart(val); + return DefineAsRegister(new(zone()) LSmiUntag(value, false)); } else { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); LOperand* temp1 = TempRegister(); - LOperand* temp2 = instr->CanTruncateToInt32() ? NULL : FixedTemp(d24); - res = DefineAsRegister(new(zone()) LTaggedToI(value, temp1, temp2)); - res = AssignEnvironment(res); + LOperand* temp2 = instr->CanTruncateToInt32() + ? 
NULL : TempDoubleRegister(); + LInstruction* result = + DefineAsRegister(new(zone()) LTaggedToI(value, temp1, temp2)); + if (!val->representation().IsSmi()) result = AssignEnvironment(result); + return result; } - - return res; } } else if (from.IsDouble()) { if (to.IsTagged()) { info()->MarkAsDeferredCalling(); - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); - LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2); return AssignPointerMap(DefineAsRegister(result)); } else { ASSERT(to.IsSmi() || to.IsInteger32()); - LOperand* value = UseRegister(instr->value()); - if (instr->CanTruncateToInt32()) { - LTruncateDoubleToIntOrSmi* result = - new(zone()) LTruncateDoubleToIntOrSmi(value); - return DefineAsRegister(result); + LOperand* value = UseRegister(val); + return DefineAsRegister(new(zone()) LTruncateDoubleToIntOrSmi(value)); } else { + LOperand* value = UseRegister(val); LDoubleToIntOrSmi* result = new(zone()) LDoubleToIntOrSmi(value); return AssignEnvironment(DefineAsRegister(result)); } @@ -1138,37 +1145,35 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { } else if (from.IsInteger32()) { info()->MarkAsDeferredCalling(); if (to.IsTagged()) { - if (instr->value()->CheckFlag(HInstruction::kUint32)) { - LOperand* value = UseRegister(instr->value()); - LNumberTagU* result = new(zone()) LNumberTagU(value, - TempRegister(), - TempRegister()); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + if (val->CheckFlag(HInstruction::kUint32)) { + LOperand* value = UseRegister(val); + LNumberTagU* result = + new(zone()) LNumberTagU(value, TempRegister(), TempRegister()); + return AssignPointerMap(DefineAsRegister(result)); } else { STATIC_ASSERT((kMinInt == Smi::kMinValue) && (kMaxInt == Smi::kMaxValue)); - LOperand* value = UseRegisterAtStart(instr->value()); + LOperand* value = UseRegisterAtStart(val); return DefineAsRegister(new(zone()) LSmiTag(value)); } } else if (to.IsSmi()) { - LOperand* value = UseRegisterAtStart(instr->value()); + LOperand* value = UseRegisterAtStart(val); LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value)); - if (instr->value()->CheckFlag(HInstruction::kUint32)) { + if (val->CheckFlag(HInstruction::kUint32)) { result = AssignEnvironment(result); } return result; } else { ASSERT(to.IsDouble()); - if (instr->value()->CheckFlag(HInstruction::kUint32)) { + if (val->CheckFlag(HInstruction::kUint32)) { return DefineAsRegister( - new(zone()) LUint32ToDouble(UseRegisterAtStart(instr->value()))); + new(zone()) LUint32ToDouble(UseRegisterAtStart(val))); } else { return DefineAsRegister( - new(zone()) LInteger32ToDouble(UseRegisterAtStart(instr->value()))); + new(zone()) LInteger32ToDouble(UseRegisterAtStart(val))); } } } - UNREACHABLE(); return NULL; } @@ -1189,27 +1194,23 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) { LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) { - if (instr->CanOmitMapChecks()) { - // LCheckMaps does nothing in this case. 
- return new(zone()) LCheckMaps(NULL); - } else { - LOperand* value = UseRegisterAtStart(instr->value()); - LOperand* temp = TempRegister(); - - if (instr->has_migration_target()) { - info()->MarkAsDeferredCalling(); - LInstruction* result = new(zone()) LCheckMaps(value, temp); - return AssignPointerMap(AssignEnvironment(result)); - } else { - return AssignEnvironment(new(zone()) LCheckMaps(value, temp)); - } + if (instr->IsStabilityCheck()) return new(zone()) LCheckMaps; + LOperand* value = UseRegisterAtStart(instr->value()); + LOperand* temp = TempRegister(); + LInstruction* result = AssignEnvironment(new(zone()) LCheckMaps(value, temp)); + if (instr->HasMigrationTarget()) { + info()->MarkAsDeferredCalling(); + result = AssignPointerMap(result); } + return result; } LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) { LOperand* value = UseRegisterAtStart(instr->value()); - return AssignEnvironment(new(zone()) LCheckNonSmi(value)); + LInstruction* result = new(zone()) LCheckNonSmi(value); + if (!instr->value()->IsHeapObject()) result = AssignEnvironment(result); + return result; } @@ -1232,7 +1233,7 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) { return AssignEnvironment( DefineAsRegister(new(zone()) LClampTToUint8(reg, TempRegister(), - FixedTemp(d24)))); + TempDoubleRegister()))); } } @@ -1249,8 +1250,9 @@ LInstruction* LChunkBuilder::DoClassOfTestAndBranch( LInstruction* LChunkBuilder::DoCompareNumericAndBranch( HCompareNumericAndBranch* instr) { + LInstruction* goto_instr = CheckElideControlInstruction(instr); + if (goto_instr != NULL) return goto_instr; Representation r = instr->representation(); - if (r.IsSmiOrInteger32()) { ASSERT(instr->left()->representation().Equals(r)); ASSERT(instr->right()->representation().Equals(r)); @@ -1418,8 +1420,12 @@ LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) { LOperand* divisor = UseRegister(instr->right()); LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) ? NULL : TempRegister(); - LDivI* div = new(zone()) LDivI(dividend, divisor, temp); - return AssignEnvironment(DefineAsRegister(div)); + LInstruction* result = + DefineAsRegister(new(zone()) LDivI(dividend, divisor, temp)); + if (!instr->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { + result = AssignEnvironment(result); + } + return result; } @@ -1447,6 +1453,7 @@ LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) { LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { HEnvironment* outer = current_block_->last_environment(); + outer->set_ast_id(instr->ReturnId()); HConstant* undefined = graph()->GetConstantUndefined(); HEnvironment* inner = outer->CopyForInlining(instr->closure(), instr->arguments_count(), @@ -1622,7 +1629,10 @@ LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) { LOperand* context = UseRegisterAtStart(instr->value()); LInstruction* result = DefineAsRegister(new(zone()) LLoadContextSlot(context)); - return instr->RequiresHoleCheck() ? 
AssignEnvironment(result) : result; + if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) { + result = AssignEnvironment(result); + } + return result; } @@ -1656,7 +1666,7 @@ LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) { ASSERT(instr->key()->representation().IsSmiOrInteger32()); ElementsKind elements_kind = instr->elements_kind(); LOperand* elements = UseRegister(instr->elements()); - LOperand* key = UseRegisterOrConstantAtStart(instr->key()); + LOperand* key = UseRegisterOrConstant(instr->key()); if (!instr->is_typed_elements()) { if (instr->representation().IsDouble()) { @@ -1687,17 +1697,14 @@ LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) { IsDoubleOrFloatElementsKind(instr->elements_kind()))); LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister(); - LLoadKeyedExternal* result = - new(zone()) LLoadKeyedExternal(elements, key, temp); - // An unsigned int array load might overflow and cause a deopt. Make sure it - // has an environment. - if (instr->RequiresHoleCheck() || - elements_kind == EXTERNAL_UINT32_ELEMENTS || - elements_kind == UINT32_ELEMENTS) { - return AssignEnvironment(DefineAsRegister(result)); - } else { - return DefineAsRegister(result); + LInstruction* result = DefineAsRegister( + new(zone()) LLoadKeyedExternal(elements, key, temp)); + if ((elements_kind == EXTERNAL_UINT32_ELEMENTS || + elements_kind == UINT32_ELEMENTS) && + !instr->CheckFlag(HInstruction::kUint32)) { + result = AssignEnvironment(result); } + return result; } } @@ -1885,13 +1892,10 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) { bool can_overflow = instr->CheckFlag(HValue::kCanOverflow); bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero); - bool needs_environment = can_overflow || bailout_on_minus_zero; HValue* least_const = instr->BetterLeftOperand(); HValue* most_const = instr->BetterRightOperand(); - LOperand* left; - // LMulConstI can handle a subset of constants: // With support for overflow detection: // -1, 0, 1, 2 @@ -1911,26 +1915,27 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) { IsPowerOf2(constant_abs - 1))))) { LConstantOperand* right = UseConstant(most_const); bool need_register = IsPowerOf2(constant_abs) && !small_constant; - left = need_register ? UseRegister(least_const) - : UseRegisterAtStart(least_const); - LMulConstIS* mul = new(zone()) LMulConstIS(left, right); - if (needs_environment) AssignEnvironment(mul); - return DefineAsRegister(mul); + LOperand* left = need_register ? UseRegister(least_const) + : UseRegisterAtStart(least_const); + LInstruction* result = + DefineAsRegister(new(zone()) LMulConstIS(left, right)); + if ((bailout_on_minus_zero && constant <= 0) || can_overflow) { + result = AssignEnvironment(result); + } + return result; } } - left = UseRegisterAtStart(least_const); // LMulI/S can handle all cases, but it requires that a register is // allocated for the second operand. - LInstruction* result; - if (instr->representation().IsSmi()) { - LOperand* right = UseRegisterAtStart(most_const); - result = DefineAsRegister(new(zone()) LMulS(left, right)); - } else { - LOperand* right = UseRegisterAtStart(most_const); - result = DefineAsRegister(new(zone()) LMulI(left, right)); + LOperand* left = UseRegisterAtStart(least_const); + LOperand* right = UseRegisterAtStart(most_const); + LInstruction* result = instr->representation().IsSmi() + ? 
DefineAsRegister(new(zone()) LMulS(left, right)) + : DefineAsRegister(new(zone()) LMulI(left, right)); + if ((bailout_on_minus_zero && least_const != most_const) || can_overflow) { + result = AssignEnvironment(result); } - if (needs_environment) AssignEnvironment(result); return result; } else if (instr->representation().IsDouble()) { return DoArithmeticD(Token::MUL, instr); @@ -1956,7 +1961,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) { } else { ASSERT(info()->IsStub()); CodeStubInterfaceDescriptor* descriptor = - info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); + info()->code_stub()->GetInterfaceDescriptor(); int index = static_cast<int>(instr->index()); Register reg = descriptor->GetParameterRegister(index); return DefineFixed(result, reg); @@ -2045,6 +2050,117 @@ LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) { } +HBitwiseBinaryOperation* LChunkBuilder::CanTransformToShiftedOp(HValue* val, + HValue** left) { + if (!val->representation().IsInteger32()) return NULL; + if (!(val->IsBitwise() || val->IsAdd() || val->IsSub())) return NULL; + + HBinaryOperation* hinstr = HBinaryOperation::cast(val); + HValue* hleft = hinstr->left(); + HValue* hright = hinstr->right(); + ASSERT(hleft->representation().Equals(hinstr->representation())); + ASSERT(hright->representation().Equals(hinstr->representation())); + + if ((hright->IsConstant() && + LikelyFitsImmField(hinstr, HConstant::cast(hright)->Integer32Value())) || + (hinstr->IsCommutative() && hleft->IsConstant() && + LikelyFitsImmField(hinstr, HConstant::cast(hleft)->Integer32Value()))) { + // The constant operand will likely fit in the immediate field. We are + // better off with + // lsl x8, x9, #imm + // add x0, x8, #imm2 + // than with + // mov x16, #imm2 + // add x0, x16, x9 LSL #imm + return NULL; + } + + HBitwiseBinaryOperation* shift = NULL; + // TODO(aleram): We will miss situations where a shift operation is used by + // different instructions both as a left and right operands. + if (hright->IsBitwiseBinaryShift() && + HBitwiseBinaryOperation::cast(hright)->right()->IsConstant()) { + shift = HBitwiseBinaryOperation::cast(hright); + if (left != NULL) { + *left = hleft; + } + } else if (hinstr->IsCommutative() && + hleft->IsBitwiseBinaryShift() && + HBitwiseBinaryOperation::cast(hleft)->right()->IsConstant()) { + shift = HBitwiseBinaryOperation::cast(hleft); + if (left != NULL) { + *left = hright; + } + } else { + return NULL; + } + + if ((JSShiftAmountFromHConstant(shift->right()) == 0) && shift->IsShr()) { + // Shifts right by zero can deoptimize. 
+ return NULL; + } + + return shift; +} + + +bool LChunkBuilder::ShiftCanBeOptimizedAway(HBitwiseBinaryOperation* shift) { + if (!shift->representation().IsInteger32()) { + return false; + } + for (HUseIterator it(shift->uses()); !it.Done(); it.Advance()) { + if (shift != CanTransformToShiftedOp(it.value())) { + return false; + } + } + return true; +} + + +LInstruction* LChunkBuilder::TryDoOpWithShiftedRightOperand( + HBinaryOperation* instr) { + HValue* left; + HBitwiseBinaryOperation* shift = CanTransformToShiftedOp(instr, &left); + + if ((shift != NULL) && ShiftCanBeOptimizedAway(shift)) { + return DoShiftedBinaryOp(instr, left, shift); + } + return NULL; +} + + +LInstruction* LChunkBuilder::DoShiftedBinaryOp( + HBinaryOperation* hinstr, HValue* hleft, HBitwiseBinaryOperation* hshift) { + ASSERT(hshift->IsBitwiseBinaryShift()); + ASSERT(!hshift->IsShr() || (JSShiftAmountFromHConstant(hshift->right()) > 0)); + + LTemplateResultInstruction<1>* res; + LOperand* left = UseRegisterAtStart(hleft); + LOperand* right = UseRegisterAtStart(hshift->left()); + LOperand* shift_amount = UseConstant(hshift->right()); + Shift shift_op; + switch (hshift->opcode()) { + case HValue::kShl: shift_op = LSL; break; + case HValue::kShr: shift_op = LSR; break; + case HValue::kSar: shift_op = ASR; break; + default: UNREACHABLE(); shift_op = NO_SHIFT; + } + + if (hinstr->IsBitwise()) { + res = new(zone()) LBitI(left, right, shift_op, shift_amount); + } else if (hinstr->IsAdd()) { + res = new(zone()) LAddI(left, right, shift_op, shift_amount); + } else { + ASSERT(hinstr->IsSub()); + res = new(zone()) LSubI(left, right, shift_op, shift_amount); + } + if (hinstr->CheckFlag(HValue::kCanOverflow)) { + AssignEnvironment(res); + } + return DefineAsRegister(res); +} + + LInstruction* LChunkBuilder::DoShift(Token::Value op, HBitwiseBinaryOperation* instr) { if (instr->representation().IsTagged()) { @@ -2056,6 +2172,10 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op, ASSERT(instr->left()->representation().Equals(instr->representation())); ASSERT(instr->right()->representation().Equals(instr->representation())); + if (ShiftCanBeOptimizedAway(instr)) { + return NULL; + } + LOperand* left = instr->representation().IsSmi() ? UseRegister(instr->left()) : UseRegisterAtStart(instr->left()); @@ -2066,8 +2186,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op, int constant_value = 0; if (right_value->IsConstant()) { right = UseConstant(right_value); - HConstant* constant = HConstant::cast(right_value); - constant_value = constant->Integer32Value() & 0x1f; + constant_value = JSShiftAmountFromHConstant(right_value); } else { right = UseRegisterAtStart(right_value); if (op == Token::ROR) { @@ -2160,7 +2279,10 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) { value = UseRegister(instr->value()); } LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp); - return instr->RequiresHoleCheck() ? 
AssignEnvironment(result) : result; + if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) { + result = AssignEnvironment(result); + } + return result; } @@ -2177,10 +2299,10 @@ LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) { LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) { + LOperand* key = UseRegisterOrConstant(instr->key()); LOperand* temp = NULL; LOperand* elements = NULL; LOperand* val = NULL; - LOperand* key = UseRegisterOrConstantAtStart(instr->key()); if (!instr->is_typed_elements() && instr->value()->representation().IsTagged() && @@ -2294,7 +2416,7 @@ LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) { LOperand* context = UseAny(instr->context()); LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(context, string, index); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + return AssignPointerMap(DefineAsRegister(result)); } @@ -2324,6 +2446,12 @@ LInstruction* LChunkBuilder::DoSub(HSub* instr) { if (instr->representation().IsSmiOrInteger32()) { ASSERT(instr->left()->representation().Equals(instr->representation())); ASSERT(instr->right()->representation().Equals(instr->representation())); + + LInstruction* shifted_operation = TryDoOpWithShiftedRightOperand(instr); + if (shifted_operation != NULL) { + return shifted_operation; + } + LOperand *left; if (instr->left()->IsConstant() && (HConstant::cast(instr->left())->Integer32Value() == 0)) { @@ -2365,17 +2493,18 @@ LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) { LInstruction* LChunkBuilder::DoTransitionElementsKind( HTransitionElementsKind* instr) { - LOperand* object = UseRegister(instr->object()); if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) { + LOperand* object = UseRegister(instr->object()); LTransitionElementsKind* result = new(zone()) LTransitionElementsKind(object, NULL, TempRegister(), TempRegister()); return result; } else { + LOperand* object = UseFixed(instr->object(), x0); LOperand* context = UseFixed(instr->context(), cp); LTransitionElementsKind* result = - new(zone()) LTransitionElementsKind(object, context, TempRegister()); - return AssignPointerMap(result); + new(zone()) LTransitionElementsKind(object, context, NULL, NULL); + return MarkAsCall(result, instr); } } @@ -2429,29 +2558,21 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) { LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); LOperand* temp3 = TempRegister(); - LMathAbsTagged* result = - new(zone()) LMathAbsTagged(context, input, temp1, temp2, temp3); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + LInstruction* result = DefineAsRegister( + new(zone()) LMathAbsTagged(context, input, temp1, temp2, temp3)); + return AssignEnvironment(AssignPointerMap(result)); } else { LOperand* input = UseRegisterAtStart(instr->value()); - LMathAbs* result = new(zone()) LMathAbs(input); - if (r.IsDouble()) { - // The Double case can never fail so it doesn't need an environment. - return DefineAsRegister(result); - } else { - ASSERT(r.IsInteger32() || r.IsSmi()); - // The Integer32 and Smi cases need an environment because they can - // deoptimize on minimum representable number. 
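For context on the TryDoOpWithShiftedRightOperand calls that DoAdd, DoBitwise and DoSub now make: ARM64 arithmetic and logical instructions accept a shifted register as their second operand, so a constant shift feeding such an operation can disappear into it. A minimal illustrative sketch of the source-level pattern being fused (not part of the patch; the registers in the comments are only an example):

#include <cstdint>

// Sketch: the kind of expression the shifted-operand rewrite targets.
// With the rewrite the shift folds into the add:
//   add w0, w0, w1, lsl #2
// instead of being materialized first:
//   lsl w2, w1, #2
//   add w0, w0, w2
int32_t AddScaled(int32_t a, int32_t b) {
  return a + (b << 2);
}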
- return AssignEnvironment(DefineAsRegister(result)); - } + LInstruction* result = DefineAsRegister(new(zone()) LMathAbs(input)); + if (!r.IsDouble()) result = AssignEnvironment(result); + return result; } } case kMathExp: { ASSERT(instr->representation().IsDouble()); ASSERT(instr->value()->representation().IsDouble()); LOperand* input = UseRegister(instr->value()); - // TODO(all): Implement TempFPRegister. - LOperand* double_temp1 = FixedTemp(d24); // This was chosen arbitrarily. + LOperand* double_temp1 = TempDoubleRegister(); LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); LOperand* temp3 = TempRegister(); @@ -2460,14 +2581,16 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) { return DefineAsRegister(result); } case kMathFloor: { - ASSERT(instr->representation().IsInteger32()); ASSERT(instr->value()->representation().IsDouble()); - // TODO(jbramley): ARM64 can easily handle a double argument with frintm, - // but we're never asked for it here. At the moment, we fall back to the - // runtime if the result doesn't fit, like the other architectures. LOperand* input = UseRegisterAtStart(instr->value()); - LMathFloor* result = new(zone()) LMathFloor(input); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + if (instr->representation().IsInteger32()) { + LMathFloorI* result = new(zone()) LMathFloorI(input); + return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + } else { + ASSERT(instr->representation().IsDouble()); + LMathFloorD* result = new(zone()) LMathFloorD(input); + return DefineAsRegister(result); + } } case kMathLog: { ASSERT(instr->representation().IsDouble()); @@ -2483,14 +2606,17 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) { return DefineAsRegister(new(zone()) LMathPowHalf(input)); } case kMathRound: { - ASSERT(instr->representation().IsInteger32()); ASSERT(instr->value()->representation().IsDouble()); - // TODO(jbramley): As with kMathFloor, we can probably handle double - // results fairly easily, but we are never asked for them. LOperand* input = UseRegister(instr->value()); - LOperand* temp = FixedTemp(d24); // Choosen arbitrarily. - LMathRound* result = new(zone()) LMathRound(input, temp); - return AssignEnvironment(DefineAsRegister(result)); + if (instr->representation().IsInteger32()) { + LOperand* temp = TempDoubleRegister(); + LMathRoundI* result = new(zone()) LMathRoundI(input, temp); + return AssignEnvironment(DefineAsRegister(result)); + } else { + ASSERT(instr->representation().IsDouble()); + LMathRoundD* result = new(zone()) LMathRoundD(input); + return DefineAsRegister(result); + } } case kMathSqrt: { ASSERT(instr->representation().IsDouble()); @@ -2561,7 +2687,9 @@ LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) { LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { LOperand* object = UseRegisterAtStart(instr->object()); LOperand* index = UseRegister(instr->index()); - return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); + LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index); + LInstruction* result = DefineSameAsFirst(load); + return AssignPointerMap(result); } diff --git a/deps/v8/src/arm64/lithium-arm64.h b/deps/v8/src/arm64/lithium-arm64.h index da3c5f17b..3abc388fe 100644 --- a/deps/v8/src/arm64/lithium-arm64.h +++ b/deps/v8/src/arm64/lithium-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
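The kMathFloor and kMathRound cases above now pick between a double-result and an integer-result instruction: LMathFloorD/LMathRoundD can be emitted directly (the removed TODO already noted that frintm handles the double case), while LMathFloorI/LMathRoundI keep their environment because converting the result to int32 can still fail. A small sketch of the two result shapes, assuming nothing beyond the standard library:

#include <cmath>
#include <cstdint>

// Illustrative only: the two result shapes split into LMathFloorD/LMathFloorI.
double FloorAsDouble(double x) {
  return std::floor(x);  // maps onto a single frintm on ARM64
}

int32_t FloorAsInt32(double x) {
  double f = std::floor(x);
  // The integer-result lithium instruction deoptimizes when the result does
  // not fit in an int32; this sketch just returns 0 to keep the cast defined.
  if (f < INT32_MIN || f > INT32_MAX) return 0;
  return static_cast<int32_t>(f);
}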
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_LITHIUM_ARM64_H_ #define V8_ARM64_LITHIUM_ARM64_H_ @@ -138,11 +115,13 @@ class LCodeGen; V(MathAbsTagged) \ V(MathClz32) \ V(MathExp) \ - V(MathFloor) \ + V(MathFloorD) \ + V(MathFloorI) \ V(MathLog) \ V(MathMinMax) \ V(MathPowHalf) \ - V(MathRound) \ + V(MathRoundD) \ + V(MathRoundI) \ V(MathSqrt) \ V(ModByConstI) \ V(ModByPowerOf2I) \ @@ -270,7 +249,9 @@ class LInstruction : public ZoneObject { // Interface to the register allocator and iterators. 
bool ClobbersTemps() const { return IsCall(); } bool ClobbersRegisters() const { return IsCall(); } - virtual bool ClobbersDoubleRegisters() const { return IsCall(); } + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const { + return IsCall(); + } bool IsMarkedAsCall() const { return IsCall(); } virtual bool HasResult() const = 0; @@ -584,7 +565,14 @@ class LAddE V8_FINAL : public LTemplateInstruction<1, 2, 0> { class LAddI V8_FINAL : public LTemplateInstruction<1, 2, 0> { public: - LAddI(LOperand* left, LOperand* right) { + LAddI(LOperand* left, LOperand* right) + : shift_(NO_SHIFT), shift_amount_(0) { + inputs_[0] = left; + inputs_[1] = right; + } + + LAddI(LOperand* left, LOperand* right, Shift shift, LOperand* shift_amount) + : shift_(shift), shift_amount_(shift_amount) { inputs_[0] = left; inputs_[1] = right; } @@ -592,8 +580,15 @@ class LAddI V8_FINAL : public LTemplateInstruction<1, 2, 0> { LOperand* left() { return inputs_[0]; } LOperand* right() { return inputs_[1]; } + Shift shift() const { return shift_; } + LOperand* shift_amount() const { return shift_amount_; } + DECLARE_CONCRETE_INSTRUCTION(AddI, "add-i") DECLARE_HYDROGEN_ACCESSOR(Add) + + protected: + Shift shift_; + LOperand* shift_amount_; }; @@ -753,7 +748,14 @@ class LBoundsCheck V8_FINAL : public LTemplateInstruction<0, 2, 0> { class LBitI V8_FINAL : public LTemplateInstruction<1, 2, 0> { public: - LBitI(LOperand* left, LOperand* right) { + LBitI(LOperand* left, LOperand* right) + : shift_(NO_SHIFT), shift_amount_(0) { + inputs_[0] = left; + inputs_[1] = right; + } + + LBitI(LOperand* left, LOperand* right, Shift shift, LOperand* shift_amount) + : shift_(shift), shift_amount_(shift_amount) { inputs_[0] = left; inputs_[1] = right; } @@ -761,10 +763,17 @@ class LBitI V8_FINAL : public LTemplateInstruction<1, 2, 0> { LOperand* left() { return inputs_[0]; } LOperand* right() { return inputs_[1]; } + Shift shift() const { return shift_; } + LOperand* shift_amount() const { return shift_amount_; } + Token::Value op() const { return hydrogen()->op(); } DECLARE_CONCRETE_INSTRUCTION(BitI, "bit-i") DECLARE_HYDROGEN_ACCESSOR(Bitwise) + + protected: + Shift shift_; + LOperand* shift_amount_; }; @@ -887,7 +896,7 @@ class LCallRuntime V8_FINAL : public LTemplateInstruction<1, 1, 0> { DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime") DECLARE_HYDROGEN_ACCESSOR(CallRuntime) - virtual bool ClobbersDoubleRegisters() const V8_OVERRIDE { + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const V8_OVERRIDE { return save_doubles() == kDontSaveFPRegs; } @@ -927,7 +936,7 @@ class LCheckInstanceType V8_FINAL : public LTemplateInstruction<0, 1, 1> { class LCheckMaps V8_FINAL : public LTemplateInstruction<0, 1, 1> { public: - explicit LCheckMaps(LOperand* value, LOperand* temp = NULL) { + LCheckMaps(LOperand* value = NULL, LOperand* temp = NULL) { inputs_[0] = value; temps_[0] = temp; } @@ -1324,14 +1333,14 @@ class LDivByConstI V8_FINAL : public LTemplateInstruction<1, 1, 1> { class LDivI V8_FINAL : public LTemplateInstruction<1, 2, 1> { public: - LDivI(LOperand* left, LOperand* right, LOperand* temp) { - inputs_[0] = left; - inputs_[1] = right; + LDivI(LOperand* dividend, LOperand* divisor, LOperand* temp) { + inputs_[0] = dividend; + inputs_[1] = divisor; temps_[0] = temp; } - LOperand* left() { return inputs_[0]; } - LOperand* right() { return inputs_[1]; } + LOperand* dividend() { return inputs_[0]; } + LOperand* divisor() { return inputs_[1]; } LOperand* temp() { return temps_[0]; } DECLARE_CONCRETE_INSTRUCTION(DivI, 
"div-i") @@ -1930,10 +1939,19 @@ class LMathExp V8_FINAL : public LUnaryMathOperation<4> { }; -class LMathFloor V8_FINAL : public LUnaryMathOperation<0> { +// Math.floor with a double result. +class LMathFloorD V8_FINAL : public LUnaryMathOperation<0> { + public: + explicit LMathFloorD(LOperand* value) : LUnaryMathOperation<0>(value) { } + DECLARE_CONCRETE_INSTRUCTION(MathFloorD, "math-floor-d") +}; + + +// Math.floor with an integer result. +class LMathFloorI V8_FINAL : public LUnaryMathOperation<0> { public: - explicit LMathFloor(LOperand* value) : LUnaryMathOperation<0>(value) { } - DECLARE_CONCRETE_INSTRUCTION(MathFloor, "math-floor") + explicit LMathFloorI(LOperand* value) : LUnaryMathOperation<0>(value) { } + DECLARE_CONCRETE_INSTRUCTION(MathFloorI, "math-floor-i") }; @@ -2029,16 +2047,28 @@ class LMathPowHalf V8_FINAL : public LUnaryMathOperation<0> { }; -class LMathRound V8_FINAL : public LUnaryMathOperation<1> { +// Math.round with an integer result. +class LMathRoundD V8_FINAL : public LUnaryMathOperation<0> { + public: + explicit LMathRoundD(LOperand* value) + : LUnaryMathOperation<0>(value) { + } + + DECLARE_CONCRETE_INSTRUCTION(MathRoundD, "math-round-d") +}; + + +// Math.round with an integer result. +class LMathRoundI V8_FINAL : public LUnaryMathOperation<1> { public: - LMathRound(LOperand* value, LOperand* temp1) + LMathRoundI(LOperand* value, LOperand* temp1) : LUnaryMathOperation<1>(value) { temps_[0] = temp1; } LOperand* temp1() { return temps_[0]; } - DECLARE_CONCRETE_INSTRUCTION(MathRound, "math-round") + DECLARE_CONCRETE_INSTRUCTION(MathRoundI, "math-round-i") }; @@ -2384,6 +2414,10 @@ class LStoreKeyed : public LTemplateInstruction<0, 3, T> { } bool NeedsCanonicalization() { + if (hydrogen()->value()->IsAdd() || hydrogen()->value()->IsSub() || + hydrogen()->value()->IsMul() || hydrogen()->value()->IsDiv()) { + return false; + } return this->hydrogen()->NeedsCanonicalization(); } uint32_t additional_index() const { return this->hydrogen()->index_offset(); } @@ -2500,7 +2534,6 @@ class LStoreNamedField V8_FINAL : public LTemplateInstruction<0, 2, 2> { virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE; - Handle<Map> transition() const { return hydrogen()->transition_map(); } Representation representation() const { return hydrogen()->field_representation(); } @@ -2725,7 +2758,14 @@ class LStoreGlobalCell V8_FINAL : public LTemplateInstruction<0, 1, 2> { class LSubI V8_FINAL : public LTemplateInstruction<1, 2, 0> { public: - LSubI(LOperand* left, LOperand* right) { + LSubI(LOperand* left, LOperand* right) + : shift_(NO_SHIFT), shift_amount_(0) { + inputs_[0] = left; + inputs_[1] = right; + } + + LSubI(LOperand* left, LOperand* right, Shift shift, LOperand* shift_amount) + : shift_(shift), shift_amount_(shift_amount) { inputs_[0] = left; inputs_[1] = right; } @@ -2733,8 +2773,15 @@ class LSubI V8_FINAL : public LTemplateInstruction<1, 2, 0> { LOperand* left() { return inputs_[0]; } LOperand* right() { return inputs_[1]; } + Shift shift() const { return shift_; } + LOperand* shift_amount() const { return shift_amount_; } + DECLARE_CONCRETE_INSTRUCTION(SubI, "sub-i") DECLARE_HYDROGEN_ACCESSOR(Sub) + + protected: + Shift shift_; + LOperand* shift_amount_; }; @@ -2778,7 +2825,7 @@ class LTransitionElementsKind V8_FINAL : public LTemplateInstruction<0, 2, 2> { LTransitionElementsKind(LOperand* object, LOperand* context, LOperand* temp1, - LOperand* temp2 = NULL) { + LOperand* temp2) { inputs_[0] = object; inputs_[1] = context; temps_[0] = temp1; @@ -3042,6 +3089,9 @@ 
class LChunkBuilder V8_FINAL : public LChunkBuilderBase { // Temporary operand that must be in a register. MUST_USE_RESULT LUnallocated* TempRegister(); + // Temporary operand that must be in a double register. + MUST_USE_RESULT LUnallocated* TempDoubleRegister(); + // Temporary operand that must be in a fixed double register. MUST_USE_RESULT LOperand* FixedTemp(DoubleRegister reg); @@ -3075,6 +3125,39 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase { void VisitInstruction(HInstruction* current); void DoBasicBlock(HBasicBlock* block); + int JSShiftAmountFromHConstant(HValue* constant) { + return HConstant::cast(constant)->Integer32Value() & 0x1f; + } + bool LikelyFitsImmField(HInstruction* instr, int imm) { + if (instr->IsAdd() || instr->IsSub()) { + return Assembler::IsImmAddSub(imm) || Assembler::IsImmAddSub(-imm); + } else { + ASSERT(instr->IsBitwise()); + unsigned unused_n, unused_imm_s, unused_imm_r; + return Assembler::IsImmLogical(imm, kWRegSizeInBits, + &unused_n, &unused_imm_s, &unused_imm_r); + } + } + + // Indicates if a sequence of the form + // lsl x8, x9, #imm + // add x0, x1, x8 + // can be replaced with: + // add x0, x1, x9 LSL #imm + // If this is not possible, the function returns NULL. Otherwise it returns a + // pointer to the shift instruction that would be optimized away. + HBitwiseBinaryOperation* CanTransformToShiftedOp(HValue* val, + HValue** left = NULL); + // Checks if all uses of the shift operation can optimize it away. + bool ShiftCanBeOptimizedAway(HBitwiseBinaryOperation* shift); + // Attempts to merge the binary operation and an eventual previous shift + // operation into a single operation. Returns the merged instruction on + // success, and NULL otherwise. + LInstruction* TryDoOpWithShiftedRightOperand(HBinaryOperation* op); + LInstruction* DoShiftedBinaryOp(HBinaryOperation* instr, + HValue* left, + HBitwiseBinaryOperation* shift); + LInstruction* DoShift(Token::Value op, HBitwiseBinaryOperation* instr); LInstruction* DoArithmeticD(Token::Value op, HArithmeticBinaryOperation* instr); diff --git a/deps/v8/src/arm64/lithium-codegen-arm64.cc b/deps/v8/src/arm64/lithium-codegen-arm64.cc index cd931e934..b064d3da9 100644 --- a/deps/v8/src/arm64/lithium-codegen-arm64.cc +++ b/deps/v8/src/arm64/lithium-codegen-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
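Two of the new LChunkBuilder helpers above deserve a note: JSShiftAmountFromHConstant masks with 0x1f because JavaScript takes shift counts modulo 32, and LikelyFitsImmField defers to Assembler::IsImmAddSub / Assembler::IsImmLogical, i.e. the ARM64 add/sub and logical immediate encodings. A one-function sketch of the masking rule (illustrative, not V8 code):

#include <cstdint>

// JavaScript only uses the low five bits of a shift count,
// e.g. (1 << 37) === 32, which is what the & 0x1f above preserves.
int32_t JsShiftLeft(int32_t value, int32_t count) {
  return value << (count & 0x1f);
}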
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -376,6 +353,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, Safepoint::DeoptMode mode) { + environment->set_has_been_used(); if (!environment->HasBeenRegistered()) { int frame_count = 0; int jsframe_count = 0; @@ -429,8 +407,9 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) { ASSERT(ToRegister(instr->result()).Is(x0)); int arity = instr->arity(); - CallFunctionStub stub(arity, instr->hydrogen()->function_flags()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); + after_push_argument_ = false; } @@ -443,8 +422,9 @@ void LCodeGen::DoCallNew(LCallNew* instr) { // No cell in x2 for construct type feedback in optimized code. __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); - CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); + after_push_argument_ = false; ASSERT(ToRegister(instr->result()).is(x0)); } @@ -465,8 +445,8 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { : DONT_OVERRIDE; if (instr->arity() == 0) { - ArrayNoArgumentConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } else if (instr->arity() == 1) { Label done; if (IsFastPackedElementsKind(kind)) { @@ -477,19 +457,22 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { __ Cbz(x10, &packed_case); ElementsKind holey_kind = GetHoleyElementsKind(kind); - ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArraySingleArgumentConstructorStub stub(isolate(), + holey_kind, + override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); __ B(&done); __ Bind(&packed_case); } - ArraySingleArgumentConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); __ Bind(&done); } else { - ArrayNArgumentsConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } + after_push_argument_ = false; ASSERT(ToRegister(instr->result()).is(x0)); } @@ -511,7 +494,7 @@ void 
LCodeGen::LoadContextFromDeferred(LOperand* context) { if (context->IsRegister()) { __ Mov(cp, ToRegister(context)); } else if (context->IsStackSlot()) { - __ Ldr(cp, ToMemOperand(context)); + __ Ldr(cp, ToMemOperand(context, kMustUseFramePointer)); } else if (context->IsConstantOperand()) { HConstant* constant = chunk_->LookupConstant(LConstantOperand::cast(context)); @@ -709,7 +692,7 @@ bool LCodeGen::GeneratePrologue() { Comment(";;; Allocate local context"); // Argument to NewContext is the function, which is in x1. if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); + FastNewContextStub stub(isolate(), heap_slots); __ CallStub(&stub); } else { __ Push(x1); @@ -909,13 +892,6 @@ void LCodeGen::FinishCode(Handle<Code> code) { code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); PopulateDeoptimizationData(code); - info()->CommitDependencies(code); -} - - -void LCodeGen::Abort(BailoutReason reason) { - info()->set_bailout_reason(reason); - status_ = ABORTED; } @@ -924,7 +900,7 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { if (length == 0) return; Handle<DeoptimizationInputData> data = - factory()->NewDeoptimizationInputData(length, TENURED); + DeoptimizationInputData::New(isolate(), length, TENURED); Handle<ByteArray> translations = translations_.CreateByteArray(isolate()->factory()); @@ -1232,9 +1208,9 @@ Operand LCodeGen::ToOperand32(LOperand* op, IntegerSignedness signedness) { Representation r = chunk_->LookupLiteralRepresentation(const_op); if (r.IsInteger32()) { ASSERT(constant->HasInteger32Value()); - return Operand(signedness == SIGNED_INT32 - ? constant->Integer32Value() - : static_cast<uint32_t>(constant->Integer32Value())); + return (signedness == SIGNED_INT32) + ? Operand(constant->Integer32Value()) + : Operand(static_cast<uint32_t>(constant->Integer32Value())); } else { // Other constants not implemented. Abort(kToOperand32UnsupportedImmediate); @@ -1252,13 +1228,38 @@ static ptrdiff_t ArgumentsOffsetWithoutFrame(ptrdiff_t index) { } -MemOperand LCodeGen::ToMemOperand(LOperand* op) const { +MemOperand LCodeGen::ToMemOperand(LOperand* op, StackMode stack_mode) const { ASSERT(op != NULL); ASSERT(!op->IsRegister()); ASSERT(!op->IsDoubleRegister()); ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); if (NeedsEagerFrame()) { - return MemOperand(fp, StackSlotOffset(op->index())); + int fp_offset = StackSlotOffset(op->index()); + if (op->index() >= 0) { + // Loads and stores have a bigger reach in positive offset than negative. + // When the load or the store can't be done in one instruction via fp + // (too big negative offset), we try to access via jssp (positive offset). + // We can reference a stack slot from jssp only if jssp references the end + // of the stack slots. It's not the case when: + // - stack_mode != kCanUseStackPointer: this is the case when a deferred + // code saved the registers. + // - after_push_argument_: arguments has been pushed for a call. + // - inlined_arguments_: inlined arguments have been pushed once. All the + // remainder of the function cannot trust jssp any longer. + // - saves_caller_doubles: some double registers have been pushed, jssp + // references the end of the double registers and not the end of the + // stack slots. + // Also, if the offset from fp is small enough to make a load/store in + // one instruction, we use a fp access. 
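The comment above reflects the ARM64 addressing modes: a negative frame-pointer offset only has the unscaled 9-bit immediate form (the is_int9 test in the condition that follows), while a positive stack-pointer offset can use the scaled 12-bit form and reach much further. A sketch of that reach check, with the encoding limits stated here as assumptions rather than taken from V8:

#include <cstdint>

// Sketch only: can a stack-slot access at byte_offset be encoded in a single
// ARM64 load/store? Negative offsets are limited to the unscaled 9-bit form;
// positive, 8-byte-aligned offsets can use the scaled 12-bit form.
bool FitsSingleLoadStore(int64_t byte_offset) {
  if (byte_offset < 0) return byte_offset >= -256;             // LDUR/STUR imm9
  if (byte_offset <= 255) return true;                         // unscaled imm9
  return (byte_offset % 8 == 0) && (byte_offset / 8 <= 4095);  // LDR imm12 * 8
}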
+ if ((stack_mode == kCanUseStackPointer) && !after_push_argument_ && + !inlined_arguments_ && !is_int9(fp_offset) && + !info()->saves_caller_doubles()) { + int jssp_offset = + (GetStackSlotCount() - op->index() - 1) * kPointerSize; + return MemOperand(masm()->StackPointer(), jssp_offset); + } + } + return MemOperand(fp, fp_offset); } else { // Retrieve parameter without eager stack-frame relative to the // stack-pointer. @@ -1275,6 +1276,21 @@ Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { } +template<class LI> +Operand LCodeGen::ToShiftedRightOperand32(LOperand* right, LI* shift_info, + IntegerSignedness signedness) { + if (shift_info->shift() == NO_SHIFT) { + return (signedness == SIGNED_INT32) ? ToOperand32I(right) + : ToOperand32U(right); + } else { + return Operand( + ToRegister32(right), + shift_info->shift(), + JSShiftAmountFromLConstant(shift_info->shift_amount())); + } +} + + bool LCodeGen::IsSmi(LConstantOperand* op) const { return chunk_->LookupLiteralRepresentation(op).IsSmi(); } @@ -1471,7 +1487,8 @@ void LCodeGen::DoAddI(LAddI* instr) { bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); Register result = ToRegister32(instr->result()); Register left = ToRegister32(instr->left()); - Operand right = ToOperand32I(instr->right()); + Operand right = ToShiftedRightOperand32I(instr->right(), instr); + if (can_overflow) { __ Adds(result, left, right); DeoptimizeIf(vs, instr->environment()); @@ -1648,6 +1665,10 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) { void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { + // We push some arguments and they will be pop in an other block. We can't + // trust that jssp references the end of the stack slots until the end of + // the function. + inlined_arguments_ = true; Register result = ToRegister(instr->result()); if (instr->hydrogen()->from_inlined()) { @@ -1737,15 +1758,15 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) { ASSERT(ToRegister(instr->right()).is(x0)); ASSERT(ToRegister(instr->result()).is(x0)); - BinaryOpICStub stub(instr->op(), NO_OVERWRITE); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } void LCodeGen::DoBitI(LBitI* instr) { Register result = ToRegister32(instr->result()); Register left = ToRegister32(instr->left()); - Operand right = ToOperand32U(instr->right()); + Operand right = ToShiftedRightOperand32U(instr->right(), instr); switch (instr->op()) { case Token::BIT_AND: __ And(result, left, right); break; @@ -1774,36 +1795,25 @@ void LCodeGen::DoBitS(LBitS* instr) { } -void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) { - if (FLAG_debug_code && check->hydrogen()->skip_check()) { - __ Assert(InvertCondition(cc), kEliminatedBoundsCheckFailed); - } else { - DeoptimizeIf(cc, check->environment()); - } -} - - void LCodeGen::DoBoundsCheck(LBoundsCheck *instr) { - if (instr->hydrogen()->skip_check()) return; - + Condition cond = instr->hydrogen()->allow_equality() ? 
hi : hs; + ASSERT(instr->hydrogen()->index()->representation().IsInteger32()); ASSERT(instr->hydrogen()->length()->representation().IsInteger32()); - Register length = ToRegister32(instr->length()); - if (instr->index()->IsConstantOperand()) { - int constant_index = - ToInteger32(LConstantOperand::cast(instr->index())); - - if (instr->hydrogen()->length()->representation().IsSmi()) { - __ Cmp(length, Smi::FromInt(constant_index)); - } else { - __ Cmp(length, constant_index); - } + Operand index = ToOperand32I(instr->index()); + Register length = ToRegister32(instr->length()); + __ Cmp(length, index); + cond = ReverseConditionForCmp(cond); } else { - ASSERT(instr->hydrogen()->index()->representation().IsInteger32()); - __ Cmp(length, ToRegister32(instr->index())); + Register index = ToRegister32(instr->index()); + Operand length = ToOperand32I(instr->length()); + __ Cmp(index, length); + } + if (FLAG_debug_code && instr->hydrogen()->skip_check()) { + __ Assert(InvertCondition(cond), kEliminatedBoundsCheckFailed); + } else { + DeoptimizeIf(cond, instr->environment()); } - Condition condition = instr->hydrogen()->allow_equality() ? lo : ls; - ApplyCheckIf(condition, instr); } @@ -2027,6 +2037,7 @@ void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) { __ Call(target); } generator.AfterCall(); + after_push_argument_ = false; } @@ -2046,11 +2057,13 @@ void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { __ Call(x10); RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); + after_push_argument_ = false; } void LCodeGen::DoCallRuntime(LCallRuntime* instr) { CallRuntime(instr->function(), instr->arity(), instr); + after_push_argument_ = false; } @@ -2059,23 +2072,24 @@ void LCodeGen::DoCallStub(LCallStub* instr) { ASSERT(ToRegister(instr->result()).is(x0)); switch (instr->hydrogen()->major_key()) { case CodeStub::RegExpExec: { - RegExpExecStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + RegExpExecStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } case CodeStub::SubString: { - SubStringStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + SubStringStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } case CodeStub::StringCompare: { - StringCompareStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + StringCompareStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } default: UNREACHABLE(); } + after_push_argument_ = false; } @@ -2117,9 +2131,11 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) { Register object_; }; - if (instr->hydrogen()->CanOmitMapChecks()) { - ASSERT(instr->value() == NULL); - ASSERT(instr->temp() == NULL); + if (instr->hydrogen()->IsStabilityCheck()) { + const UniqueSet<Map>* maps = instr->hydrogen()->maps(); + for (int i = 0; i < maps->size(); ++i) { + AddStabilityDependency(maps->at(i).handle()); + } return; } @@ -2129,24 +2145,26 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) { __ Ldr(map_reg, FieldMemOperand(object, HeapObject::kMapOffset)); DeferredCheckMaps* deferred = NULL; - if (instr->hydrogen()->has_migration_target()) { + if (instr->hydrogen()->HasMigrationTarget()) { deferred = new(zone()) DeferredCheckMaps(this, instr, object); __ Bind(deferred->check_maps()); } - UniqueSet<Map> map_set = instr->hydrogen()->map_set(); + const UniqueSet<Map>* maps = instr->hydrogen()->maps(); Label success; - for (int i = 0; i < map_set.size(); i++) { - Handle<Map> 
map = map_set.at(i).handle(); + for (int i = 0; i < maps->size() - 1; i++) { + Handle<Map> map = maps->at(i).handle(); __ CompareMap(map_reg, map); __ B(eq, &success); } + Handle<Map> map = maps->at(maps->size() - 1).handle(); + __ CompareMap(map_reg, map); // We didn't match a map. - if (instr->hydrogen()->has_migration_target()) { - __ B(deferred->entry()); + if (instr->hydrogen()->HasMigrationTarget()) { + __ B(ne, deferred->entry()); } else { - Deoptimize(instr->environment()); + DeoptimizeIf(ne, instr->environment()); } __ Bind(&success); @@ -2529,9 +2547,16 @@ void LCodeGen::DoConstantS(LConstantS* instr) { void LCodeGen::DoConstantT(LConstantT* instr) { - Handle<Object> value = instr->value(isolate()); + Handle<Object> object = instr->value(isolate()); AllowDeferredHandleDereference smi_check; - __ LoadObject(ToRegister(instr->result()), value); + if (instr->hydrogen()->HasObjectMap()) { + Handle<Map> object_map = instr->hydrogen()->ObjectMap().handle(); + ASSERT(object->IsHeapObject()); + ASSERT(!object_map->is_stable() || + *object_map == Handle<HeapObject>::cast(object)->map()); + USE(object_map); + } + __ LoadObject(ToRegister(instr->result()), object); } @@ -2580,19 +2605,15 @@ void LCodeGen::DoDateField(LDateField* instr) { Register temp1 = x10; Register temp2 = x11; Smi* index = instr->index(); - Label runtime, done, deopt, obj_ok; + Label runtime, done; ASSERT(object.is(result) && object.Is(x0)); ASSERT(instr->IsMarkedAsCall()); - __ JumpIfSmi(object, &deopt); + DeoptimizeIfSmi(object, instr->environment()); __ CompareObjectType(object, temp1, temp1, JS_DATE_TYPE); - __ B(eq, &obj_ok); - - __ Bind(&deopt); - Deoptimize(instr->environment()); + DeoptimizeIf(ne, instr->environment()); - __ Bind(&obj_ok); if (index->value() == 0) { __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); } else { @@ -2636,7 +2657,7 @@ void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { Register dividend = ToRegister32(instr->dividend()); int32_t divisor = instr->divisor(); Register result = ToRegister32(instr->result()); - ASSERT(divisor == kMinInt || (divisor != 0 && IsPowerOf2(Abs(divisor)))); + ASSERT(divisor == kMinInt || IsPowerOf2(Abs(divisor))); ASSERT(!result.is(dividend)); // Check for (0 / -x) that will produce negative zero. @@ -2707,10 +2728,11 @@ void LCodeGen::DoDivByConstI(LDivByConstI* instr) { } +// TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. void LCodeGen::DoDivI(LDivI* instr) { HBinaryOperation* hdiv = instr->hydrogen(); - Register dividend = ToRegister32(instr->left()); - Register divisor = ToRegister32(instr->right()); + Register dividend = ToRegister32(instr->dividend()); + Register divisor = ToRegister32(instr->divisor()); Register result = ToRegister32(instr->result()); // Issue the division first, and then check for any deopt cases whilst the @@ -2722,10 +2744,9 @@ void LCodeGen::DoDivI(LDivI* instr) { return; } - Label deopt; // Check for x / 0. if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { - __ Cbz(divisor, &deopt); + DeoptimizeIfZero(divisor, instr->environment()); } // Check for (0 / -x) as that will produce negative zero. @@ -2737,7 +2758,7 @@ void LCodeGen::DoDivI(LDivI* instr) { // If the divisor >= 0 (pl, the opposite of mi) set the flags to // condition ne, so we don't deopt, ie. positive divisor doesn't deopt. __ Ccmp(dividend, 0, NoFlag, mi); - __ B(eq, &deopt); + DeoptimizeIf(eq, instr->environment()); } // Check for (kMinInt / -1). @@ -2749,19 +2770,13 @@ void LCodeGen::DoDivI(LDivI* instr) { // -1. 
If overflow is clear, set the flags for condition ne, as the // dividend isn't -1, and thus we shouldn't deopt. __ Ccmp(divisor, -1, NoFlag, vs); - __ B(eq, &deopt); + DeoptimizeIf(eq, instr->environment()); } // Compute remainder and deopt if it's not zero. Register remainder = ToRegister32(instr->temp()); __ Msub(remainder, result, divisor, dividend); - __ Cbnz(remainder, &deopt); - - Label div_ok; - __ B(&div_ok); - __ Bind(&deopt); - Deoptimize(instr->environment()); - __ Bind(&div_ok); + DeoptimizeIfNotZero(remainder, instr->environment()); } @@ -2773,7 +2788,7 @@ void LCodeGen::DoDoubleToIntOrSmi(LDoubleToIntOrSmi* instr) { DeoptimizeIfMinusZero(input, instr->environment()); } - __ TryConvertDoubleToInt32(result, input, double_scratch()); + __ TryRepresentDoubleAsInt32(result, input, double_scratch()); DeoptimizeIf(ne, instr->environment()); if (instr->tag_result()) { @@ -2806,10 +2821,11 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { // space for nested functions that don't need literals cloning. bool pretenure = instr->hydrogen()->pretenure(); if (!pretenure && instr->hydrogen()->has_no_literals()) { - FastNewClosureStub stub(instr->hydrogen()->strict_mode(), + FastNewClosureStub stub(isolate(), + instr->hydrogen()->strict_mode(), instr->hydrogen()->is_generator()); __ Mov(x2, Operand(instr->hydrogen()->shared_info())); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } else { __ Mov(x2, Operand(instr->hydrogen()->shared_info())); __ Mov(x1, Operand(pretenure ? factory()->true_value() @@ -2848,19 +2864,18 @@ void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { ASSERT(instr->IsMarkedAsCall()); ASSERT(object.Is(x0)); - Label deopt; - - __ JumpIfRoot(object, Heap::kUndefinedValueRootIndex, &deopt); + DeoptimizeIfRoot(object, Heap::kUndefinedValueRootIndex, + instr->environment()); __ LoadRoot(null_value, Heap::kNullValueRootIndex); __ Cmp(object, null_value); - __ B(eq, &deopt); + DeoptimizeIf(eq, instr->environment()); - __ JumpIfSmi(object, &deopt); + DeoptimizeIfSmi(object, instr->environment()); STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); __ CompareObjectType(object, x1, x1, LAST_JS_PROXY_TYPE); - __ B(le, &deopt); + DeoptimizeIf(le, instr->environment()); Label use_cache, call_runtime; __ CheckEnumCache(object, null_value, x1, x2, x3, x4, &call_runtime); @@ -2868,16 +2883,13 @@ void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { __ Ldr(object, FieldMemOperand(object, HeapObject::kMapOffset)); __ B(&use_cache); - __ Bind(&deopt); - Deoptimize(instr->environment()); - // Get the set of properties to enumerate. 
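The DoDivI rewrite above swaps the hand-rolled deopt label for the DeoptimizeIf* helpers but keeps the same bailout conditions for a non-truncating integer division, each emitted only when the corresponding Hydrogen flag is set. Collected into one illustrative predicate (not V8 code):

#include <cstdint>

// Sketch of when an int32 division cannot produce a plain int32 result.
bool Int32DivisionDeopts(int32_t dividend, int32_t divisor) {
  if (divisor == 0) return true;                            // x / 0
  if (dividend == 0 && divisor < 0) return true;            // 0 / -x is -0 in JS
  if (dividend == INT32_MIN && divisor == -1) return true;  // kMinInt / -1 overflows
  return (dividend % divisor) != 0;                         // remainder => not int32
}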
__ Bind(&call_runtime); __ Push(object); CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); __ Ldr(x1, FieldMemOperand(object, HeapObject::kMapOffset)); - __ JumpIfNotRoot(x1, Heap::kMetaMapRootIndex, &deopt); + DeoptimizeIfNotRoot(x1, Heap::kMetaMapRootIndex, instr->environment()); __ Bind(&use_cache); } @@ -2985,8 +2997,8 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) { ASSERT(ToRegister(instr->left()).Is(InstanceofStub::left())); ASSERT(ToRegister(instr->right()).Is(InstanceofStub::right())); - InstanceofStub stub(InstanceofStub::kArgsInRegisters); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); // InstanceofStub returns a result in x0: // 0 => not an instance @@ -3100,8 +3112,8 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { ASSERT(ToRegister(instr->value()).Is(InstanceofStub::left())); __ LoadObject(InstanceofStub::right(), instr->function()); - InstanceofStub stub(flags); - CallCodeGeneric(stub.GetCode(isolate()), + InstanceofStub stub(isolate(), flags); + CallCodeGeneric(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); @@ -3144,6 +3156,7 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { instr, x1); } + after_push_argument_ = false; } @@ -3285,11 +3298,11 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { Register function = ToRegister(instr->function()); Register result = ToRegister(instr->result()); Register temp = ToRegister(instr->temp()); - Label deopt; // Check that the function really is a function. Leaves map in the result // register. - __ JumpIfNotObjectType(function, result, temp, JS_FUNCTION_TYPE, &deopt); + __ CompareObjectType(function, result, temp, JS_FUNCTION_TYPE); + DeoptimizeIf(ne, instr->environment()); // Make sure that the function has an instance prototype. Label non_instance; @@ -3301,7 +3314,8 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { JSFunction::kPrototypeOrInitialMapOffset)); // Check that the function has a prototype or an initial map. - __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &deopt); + DeoptimizeIfRoot(result, Heap::kTheHoleValueRootIndex, + instr->environment()); // If the function does not have an initial map, we're done. Label done; @@ -3316,11 +3330,6 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { // map. __ Bind(&non_instance); __ Ldr(result, FieldMemOperand(result, Map::kConstructorOffset)); - __ B(&done); - - // Deoptimize case. - __ Bind(&deopt); - Deoptimize(instr->environment()); // All done. __ Bind(&done); @@ -3359,51 +3368,28 @@ MemOperand LCodeGen::PrepareKeyedExternalArrayOperand( ElementsKind elements_kind, int additional_index) { int element_size_shift = ElementsKindToShiftSize(elements_kind); - int additional_offset = IsFixedTypedArrayElementsKind(elements_kind) - ? 
FixedTypedArrayBase::kDataOffset - kHeapObjectTag - : 0; + int additional_offset = additional_index << element_size_shift; + if (IsFixedTypedArrayElementsKind(elements_kind)) { + additional_offset += FixedTypedArrayBase::kDataOffset - kHeapObjectTag; + } if (key_is_constant) { - int base_offset = ((constant_key + additional_index) << element_size_shift); - return MemOperand(base, base_offset + additional_offset); + int key_offset = constant_key << element_size_shift; + return MemOperand(base, key_offset + additional_offset); } - if (additional_index == 0) { - if (key_is_smi) { - // Key is smi: untag, and scale by element size. - __ Add(scratch, base, Operand::UntagSmiAndScale(key, element_size_shift)); - return MemOperand(scratch, additional_offset); - } else { - // Key is not smi, and element size is not byte: scale by element size. - if (additional_offset == 0) { - return MemOperand(base, key, SXTW, element_size_shift); - } else { - __ Add(scratch, base, Operand(key, SXTW, element_size_shift)); - return MemOperand(scratch, additional_offset); - } - } - } else { - // TODO(all): Try to combine these cases a bit more intelligently. - if (additional_offset == 0) { - if (key_is_smi) { - __ SmiUntag(scratch, key); - __ Add(scratch.W(), scratch.W(), additional_index); - } else { - __ Add(scratch.W(), key.W(), additional_index); - } - return MemOperand(base, scratch, LSL, element_size_shift); - } else { - if (key_is_smi) { - __ Add(scratch, base, - Operand::UntagSmiAndScale(key, element_size_shift)); - } else { - __ Add(scratch, base, Operand(key, SXTW, element_size_shift)); - } - return MemOperand( - scratch, - (additional_index << element_size_shift) + additional_offset); - } + if (key_is_smi) { + __ Add(scratch, base, Operand::UntagSmiAndScale(key, element_size_shift)); + return MemOperand(scratch, additional_offset); + } + + if (additional_offset == 0) { + return MemOperand(base, key, SXTW, element_size_shift); } + + ASSERT(!AreAliased(scratch, key)); + __ Add(scratch, base, additional_offset); + return MemOperand(scratch, key, SXTW, element_size_shift); } @@ -3496,11 +3482,14 @@ void LCodeGen::DoLoadKeyedExternal(LLoadKeyedExternal* instr) { } -void LCodeGen::CalcKeyedArrayBaseRegister(Register base, - Register elements, - Register key, - bool key_is_tagged, - ElementsKind elements_kind) { +MemOperand LCodeGen::PrepareKeyedArrayOperand(Register base, + Register elements, + Register key, + bool key_is_tagged, + ElementsKind elements_kind, + Representation representation, + int additional_index) { + STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) && (kSmiTag == 0)); int element_size_shift = ElementsKindToShiftSize(elements_kind); // Even though the HLoad/StoreKeyed instructions force the input @@ -3509,11 +3498,28 @@ void LCodeGen::CalcKeyedArrayBaseRegister(Register base, // can be tagged, so that case must be handled here, too. if (key_is_tagged) { __ Add(base, elements, Operand::UntagSmiAndScale(key, element_size_shift)); + if (representation.IsInteger32()) { + ASSERT(elements_kind == FAST_SMI_ELEMENTS); + // Read or write only the most-significant 32 bits in the case of fast smi + // arrays. + return UntagSmiFieldMemOperand(base, additional_index); + } else { + return FieldMemOperand(base, additional_index); + } } else { // Sign extend key because it could be a 32-bit negative value or contain // garbage in the top 32-bits. The address computation happens in 64-bit. 
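The PrepareKeyedExternalArrayOperand rewrite above folds the old special cases into one rule: the effective address is base + (key << element_size_shift) + additional_offset, where additional_offset now carries the additional_index contribution and, for fixed typed arrays, the data-offset-minus-tag adjustment. A standalone sketch of that arithmetic (illustration only; the kSmiShift == 32 layout is taken from the assertions elsewhere in this diff):

#include <cstdint>
#include <cstdio>

uint64_t element_address(uint64_t base, int64_t key, bool key_is_smi,
                         int element_size_shift, int additional_offset) {
  // A tagged smi keeps its payload in the upper 32 bits; an untagged key is
  // sign-extended from 32 to 64 bits before scaling (the SXTW in the code).
  int64_t untagged = key_is_smi ? (key >> 32) : static_cast<int32_t>(key);
  return base + (untagged << element_size_shift) + additional_offset;
}

int main() {
  uint64_t base = 0x1000;
  // Untagged key 5, 8-byte elements: base + 5 * 8 == 0x1028.
  std::printf("0x%llx\n", (unsigned long long)element_address(base, 5, false, 3, 0));
  // The same element reached through a smi-tagged key (5 << 32).
  std::printf("0x%llx\n",
              (unsigned long long)element_address(base, int64_t{5} << 32, true, 3, 0));
  return 0;
}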
ASSERT((element_size_shift >= 0) && (element_size_shift <= 4)); - __ Add(base, elements, Operand(key, SXTW, element_size_shift)); + if (representation.IsInteger32()) { + ASSERT(elements_kind == FAST_SMI_ELEMENTS); + // Read or write only the most-significant 32 bits in the case of fast smi + // arrays. + __ Add(base, elements, Operand(key, SXTW, element_size_shift)); + return UntagSmiFieldMemOperand(base, additional_index); + } else { + __ Add(base, elements, additional_index - kHeapObjectTag); + return MemOperand(base, key, SXTW, element_size_shift); + } } } @@ -3521,8 +3527,7 @@ void LCodeGen::CalcKeyedArrayBaseRegister(Register base, void LCodeGen::DoLoadKeyedFixedDouble(LLoadKeyedFixedDouble* instr) { Register elements = ToRegister(instr->elements()); DoubleRegister result = ToDoubleRegister(instr->result()); - Register load_base; - int offset = 0; + MemOperand mem_op; if (instr->key()->IsConstantOperand()) { ASSERT(instr->hydrogen()->RequiresHoleCheck() || @@ -3532,27 +3537,30 @@ void LCodeGen::DoLoadKeyedFixedDouble(LLoadKeyedFixedDouble* instr) { if (constant_key & 0xf0000000) { Abort(kArrayIndexConstantValueTooBig); } - offset = FixedDoubleArray::OffsetOfElementAt(constant_key + - instr->additional_index()); - load_base = elements; + int offset = FixedDoubleArray::OffsetOfElementAt(constant_key + + instr->additional_index()); + mem_op = FieldMemOperand(elements, offset); } else { - load_base = ToRegister(instr->temp()); + Register load_base = ToRegister(instr->temp()); Register key = ToRegister(instr->key()); bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); - CalcKeyedArrayBaseRegister(load_base, elements, key, key_is_tagged, - instr->hydrogen()->elements_kind()); - offset = FixedDoubleArray::OffsetOfElementAt(instr->additional_index()); + int offset = FixedDoubleArray::OffsetOfElementAt(instr->additional_index()); + mem_op = PrepareKeyedArrayOperand(load_base, elements, key, key_is_tagged, + instr->hydrogen()->elements_kind(), + instr->hydrogen()->representation(), + offset); } - __ Ldr(result, FieldMemOperand(load_base, offset)); + + __ Ldr(result, mem_op); if (instr->hydrogen()->RequiresHoleCheck()) { Register scratch = ToRegister(instr->temp()); - - // TODO(all): Is it faster to reload this value to an integer register, or - // move from fp to integer? - __ Fmov(scratch, result); - __ Cmp(scratch, kHoleNanInt64); - DeoptimizeIf(eq, instr->environment()); + // Detect the hole NaN by adding one to the integer representation of the + // result, and checking for overflow. 
+ STATIC_ASSERT(kHoleNanInt64 == 0x7fffffffffffffff); + __ Ldr(scratch, mem_op); + __ Cmn(scratch, 1); + DeoptimizeIf(vs, instr->environment()); } } @@ -3560,35 +3568,35 @@ void LCodeGen::DoLoadKeyedFixedDouble(LLoadKeyedFixedDouble* instr) { void LCodeGen::DoLoadKeyedFixed(LLoadKeyedFixed* instr) { Register elements = ToRegister(instr->elements()); Register result = ToRegister(instr->result()); - Register load_base; - int offset = 0; + MemOperand mem_op; + Representation representation = instr->hydrogen()->representation(); if (instr->key()->IsConstantOperand()) { ASSERT(instr->temp() == NULL); LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); - offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) + - instr->additional_index()); - load_base = elements; + int offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) + + instr->additional_index()); + if (representation.IsInteger32()) { + ASSERT(instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS); + STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) && + (kSmiTag == 0)); + mem_op = UntagSmiFieldMemOperand(elements, offset); + } else { + mem_op = FieldMemOperand(elements, offset); + } } else { - load_base = ToRegister(instr->temp()); + Register load_base = ToRegister(instr->temp()); Register key = ToRegister(instr->key()); bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); - CalcKeyedArrayBaseRegister(load_base, elements, key, key_is_tagged, - instr->hydrogen()->elements_kind()); - offset = FixedArray::OffsetOfElementAt(instr->additional_index()); - } - Representation representation = instr->hydrogen()->representation(); + int offset = FixedArray::OffsetOfElementAt(instr->additional_index()); - if (representation.IsInteger32() && - instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS) { - STATIC_ASSERT(kSmiValueSize == 32 && kSmiShift == 32 && kSmiTag == 0); - __ Load(result, UntagSmiFieldMemOperand(load_base, offset), - Representation::Integer32()); - } else { - __ Load(result, FieldMemOperand(load_base, offset), - representation); + mem_op = PrepareKeyedArrayOperand(load_base, elements, key, key_is_tagged, + instr->hydrogen()->elements_kind(), + representation, offset); } + __ Load(result, mem_op, representation); + if (instr->hydrogen()->RequiresHoleCheck()) { if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { DeoptimizeIfNotSmi(result, instr->environment()); @@ -3688,10 +3696,8 @@ void LCodeGen::DoMathAbs(LMathAbs* instr) { : ToRegister32(instr->value()); Register result = r.IsSmi() ? ToRegister(instr->result()) : ToRegister32(instr->result()); - Label done; - __ Abs(result, input, NULL, &done); - Deoptimize(instr->environment()); - __ Bind(&done); + __ Abs(result, input); + DeoptimizeIf(vs, instr->environment()); } } @@ -3832,9 +3838,15 @@ void LCodeGen::DoMathExp(LMathExp* instr) { } -void LCodeGen::DoMathFloor(LMathFloor* instr) { - // TODO(jbramley): If we could provide a double result, we could use frintm - // and produce a valid double result in a single instruction. 
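The hole-NaN check in the DoLoadKeyedFixedDouble hunk above changes from an Fmov plus a 64-bit compare into a plain reload and a Cmn (compare-negative, i.e. add) against 1: kHoleNanInt64 is 0x7fffffffffffffff, the largest positive int64, so adding 1 sets the overflow flag for exactly that bit pattern and no other. A standalone sketch (illustration only, not V8 code):

#include <cstdint>
#include <cstdio>

bool is_hole_nan(uint64_t bits) {
  // Signed 64-bit overflow of bits + 1 happens only when bits == INT64_MAX,
  // which is precisely the hole sentinel.
  return bits == UINT64_C(0x7fffffffffffffff);
}

int main() {
  uint64_t hole = UINT64_C(0x7fffffffffffffff);       // the hole NaN pattern
  uint64_t quiet_nan = UINT64_C(0x7ff8000000000000);  // an ordinary quiet NaN
  std::printf("%d %d\n", is_hole_nan(hole), is_hole_nan(quiet_nan));  // 1 0
  return 0;
}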
+void LCodeGen::DoMathFloorD(LMathFloorD* instr) { + DoubleRegister input = ToDoubleRegister(instr->value()); + DoubleRegister result = ToDoubleRegister(instr->result()); + + __ Frintm(result, input); +} + + +void LCodeGen::DoMathFloorI(LMathFloorI* instr) { DoubleRegister input = ToDoubleRegister(instr->value()); Register result = ToRegister(instr->result()); @@ -3868,22 +3880,28 @@ void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { } // If the divisor is negative, we have to negate and handle edge cases. - Label not_kmin_int, done; __ Negs(result, dividend); if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { DeoptimizeIf(eq, instr->environment()); } - if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { - // Note that we could emit branch-free code, but that would need one more - // register. - if (divisor == -1) { - DeoptimizeIf(vs, instr->environment()); - } else { - __ B(vc, &not_kmin_int); - __ Mov(result, kMinInt / divisor); - __ B(&done); - } + + // If the negation could not overflow, simply shifting is OK. + if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { + __ Mov(result, Operand(dividend, ASR, shift)); + return; + } + + // Dividing by -1 is basically negation, unless we overflow. + if (divisor == -1) { + DeoptimizeIf(vs, instr->environment()); + return; } + + // Using a conditional data processing instruction would need 1 more register. + Label not_kmin_int, done; + __ B(vc, &not_kmin_int); + __ Mov(result, kMinInt / divisor); + __ B(&done); __ bind(&not_kmin_int); __ Mov(result, Operand(dividend, ASR, shift)); __ bind(&done); @@ -3936,6 +3954,7 @@ void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { } +// TODO(svenpanne) Refactor this to avoid code duplication with DoDivI. void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) { Register dividend = ToRegister32(instr->dividend()); Register divisor = ToRegister32(instr->divisor()); @@ -4034,7 +4053,7 @@ void LCodeGen::DoPower(LPower* instr) { ASSERT(ToDoubleRegister(instr->result()).is(d0)); if (exponent_type.IsSmi()) { - MathPowStub stub(MathPowStub::TAGGED); + MathPowStub stub(isolate(), MathPowStub::TAGGED); __ CallStub(&stub); } else if (exponent_type.IsTagged()) { Label no_deopt; @@ -4043,25 +4062,54 @@ void LCodeGen::DoPower(LPower* instr) { DeoptimizeIfNotRoot(x0, Heap::kHeapNumberMapRootIndex, instr->environment()); __ Bind(&no_deopt); - MathPowStub stub(MathPowStub::TAGGED); + MathPowStub stub(isolate(), MathPowStub::TAGGED); __ CallStub(&stub); } else if (exponent_type.IsInteger32()) { // Ensure integer exponent has no garbage in top 32-bits, as MathPowStub // supports large integer exponents. Register exponent = ToRegister(instr->right()); __ Sxtw(exponent, exponent); - MathPowStub stub(MathPowStub::INTEGER); + MathPowStub stub(isolate(), MathPowStub::INTEGER); __ CallStub(&stub); } else { ASSERT(exponent_type.IsDouble()); - MathPowStub stub(MathPowStub::DOUBLE); + MathPowStub stub(isolate(), MathPowStub::DOUBLE); __ CallStub(&stub); } } -void LCodeGen::DoMathRound(LMathRound* instr) { - // TODO(jbramley): We could provide a double result here using frint.
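The reworked negative-divisor path in DoFlooringDivByPowerOf2I above relies on floor(x / -2^k) being equal to floor(-x / 2^k), which an arithmetic shift right by k computes directly; the only hazard is that negating kMinInt overflows, hence the vs deopt and the kMinInt / divisor special case. A standalone sketch of the arithmetic (illustration only; the shift of a negative value here is an arithmetic shift, matching ARM64 ASR):

#include <cstdint>
#include <cstdio>

int32_t flooring_div_by_neg_pow2(int32_t dividend, int k) {
  // Caller must handle dividend == INT32_MIN separately: the negation overflows.
  return (-dividend) >> k;  // arithmetic shift right == floor division by 2^k
}

int main() {
  std::printf("%d\n", flooring_div_by_neg_pow2(7, 1));   // floor(7 / -2)  == -4
  std::printf("%d\n", flooring_div_by_neg_pow2(-7, 1));  // floor(-7 / -2) ==  3
  return 0;
}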
+void LCodeGen::DoMathRoundD(LMathRoundD* instr) { + DoubleRegister input = ToDoubleRegister(instr->value()); + DoubleRegister result = ToDoubleRegister(instr->result()); + DoubleRegister scratch_d = double_scratch(); + + ASSERT(!AreAliased(input, result, scratch_d)); + + Label done; + + __ Frinta(result, input); + __ Fcmp(input, 0.0); + __ Fccmp(result, input, ZFlag, lt); + // The result is correct if the input was in [-0, +infinity], or was a + // negative integral value. + __ B(eq, &done); + + // Here the input is negative, non integral, with an exponent lower than 52. + // We do not have to worry about the 0.49999999999999994 (0x3fdfffffffffffff) + // case. So we can safely add 0.5. + __ Fmov(scratch_d, 0.5); + __ Fadd(result, input, scratch_d); + __ Frintm(result, result); + // The range [-0.5, -0.0[ yielded +0.0. Force the sign to negative. + __ Fabs(result, result); + __ Fneg(result, result); + + __ Bind(&done); +} + + +void LCodeGen::DoMathRoundI(LMathRoundI* instr) { DoubleRegister input = ToDoubleRegister(instr->value()); DoubleRegister temp1 = ToDoubleRegister(instr->temp1()); Register result = ToRegister(instr->result()); @@ -4100,7 +4148,7 @@ void LCodeGen::DoMathRound(LMathRound* instr) { // Since we're providing a 32-bit result, we can implement ties-to-infinity by // adding 0.5 to the input, then taking the floor of the result. This does not // work for very large positive doubles because adding 0.5 would cause an - // intermediate rounding stage, so a different approach will be necessary if a + // intermediate rounding stage, so a different approach is necessary when a // double result is needed. __ Fadd(temp1, input, dot_five); __ Fcvtms(result, temp1); @@ -4221,25 +4269,16 @@ void LCodeGen::DoModI(LModI* instr) { Register divisor = ToRegister32(instr->right()); Register result = ToRegister32(instr->result()); - Label deopt, done; + Label done; // modulo = dividend - quotient * divisor __ Sdiv(result, dividend, divisor); if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { - // Combine the deoptimization sites. - Label ok; - __ Cbnz(divisor, &ok); - __ Bind(&deopt); - Deoptimize(instr->environment()); - __ Bind(&ok); + DeoptimizeIfZero(divisor, instr->environment()); } __ Msub(result, result, divisor, dividend); if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { __ Cbnz(result, &done); - if (deopt.is_bound()) { // TODO(all) This is a hack, remove this... 
- __ Tbnz(dividend, kWSignBit, &deopt); - } else { - DeoptimizeIfNegative(dividend, instr->environment()); - } + DeoptimizeIfNegative(dividend, instr->environment()); } __ Bind(&done); } @@ -4648,6 +4687,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) { Abort(kDoPushArgumentNotImplementedForDoubleType); } else { __ Push(ToRegister(argument)); + after_push_argument_ = true; } } @@ -4703,13 +4743,13 @@ MemOperand LCodeGen::BuildSeqStringOperand(Register string, return FieldMemOperand(string, SeqString::kHeaderSize + offset); } + __ Add(temp, string, SeqString::kHeaderSize - kHeapObjectTag); if (encoding == String::ONE_BYTE_ENCODING) { - __ Add(temp, string, Operand(ToRegister32(index), SXTW)); + return MemOperand(temp, ToRegister32(index), SXTW); } else { STATIC_ASSERT(kUC16Size == 2); - __ Add(temp, string, Operand(ToRegister32(index), SXTW, 1)); + return MemOperand(temp, ToRegister32(index), SXTW, 1); } - return FieldMemOperand(temp, SeqString::kHeaderSize); } @@ -4826,7 +4866,7 @@ void LCodeGen::DoShiftI(LShiftI* instr) { } } else { ASSERT(right_op->IsConstantOperand()); - int shift_count = ToInteger32(LConstantOperand::cast(right_op)) & 0x1f; + int shift_count = JSShiftAmountFromLConstant(right_op); if (shift_count == 0) { if ((instr->op() == Token::SHR) && instr->can_deopt()) { DeoptimizeIfNegative(left, instr->environment()); @@ -4889,7 +4929,7 @@ void LCodeGen::DoShiftS(LShiftS* instr) { } } else { ASSERT(right_op->IsConstantOperand()); - int shift_count = ToInteger32(LConstantOperand::cast(right_op)) & 0x1f; + int shift_count = JSShiftAmountFromLConstant(right_op); if (shift_count == 0) { if ((instr->op() == Token::SHR) && instr->can_deopt()) { DeoptimizeIfNegative(left, instr->environment()); @@ -5152,34 +5192,32 @@ void LCodeGen::DoStoreKeyedExternal(LStoreKeyedExternal* instr) { void LCodeGen::DoStoreKeyedFixedDouble(LStoreKeyedFixedDouble* instr) { Register elements = ToRegister(instr->elements()); DoubleRegister value = ToDoubleRegister(instr->value()); - Register store_base = no_reg; - int offset = 0; + MemOperand mem_op; if (instr->key()->IsConstantOperand()) { int constant_key = ToInteger32(LConstantOperand::cast(instr->key())); if (constant_key & 0xf0000000) { Abort(kArrayIndexConstantValueTooBig); } - offset = FixedDoubleArray::OffsetOfElementAt(constant_key + - instr->additional_index()); - store_base = elements; + int offset = FixedDoubleArray::OffsetOfElementAt(constant_key + + instr->additional_index()); + mem_op = FieldMemOperand(elements, offset); } else { - store_base = ToRegister(instr->temp()); + Register store_base = ToRegister(instr->temp()); Register key = ToRegister(instr->key()); bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); - CalcKeyedArrayBaseRegister(store_base, elements, key, key_is_tagged, - instr->hydrogen()->elements_kind()); - offset = FixedDoubleArray::OffsetOfElementAt(instr->additional_index()); + int offset = FixedDoubleArray::OffsetOfElementAt(instr->additional_index()); + mem_op = PrepareKeyedArrayOperand(store_base, elements, key, key_is_tagged, + instr->hydrogen()->elements_kind(), + instr->hydrogen()->representation(), + offset); } if (instr->NeedsCanonicalization()) { - DoubleRegister dbl_scratch = double_scratch(); - __ Fmov(dbl_scratch, - FixedDoubleArray::canonical_not_the_hole_nan_as_double()); - __ Fmaxnm(dbl_scratch, dbl_scratch, value); - __ Str(dbl_scratch, FieldMemOperand(store_base, offset)); + __ CanonicalizeNaN(double_scratch(), value); + __ Str(double_scratch(), mem_op); } else { - __ 
Str(value, FieldMemOperand(store_base, offset)); + __ Str(value, mem_op); } } @@ -5190,37 +5228,41 @@ void LCodeGen::DoStoreKeyedFixed(LStoreKeyedFixed* instr) { Register scratch = no_reg; Register store_base = no_reg; Register key = no_reg; - int offset = 0; + MemOperand mem_op; if (!instr->key()->IsConstantOperand() || instr->hydrogen()->NeedsWriteBarrier()) { scratch = ToRegister(instr->temp()); } + Representation representation = instr->hydrogen()->value()->representation(); if (instr->key()->IsConstantOperand()) { LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); - offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) + - instr->additional_index()); + int offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) + + instr->additional_index()); store_base = elements; + if (representation.IsInteger32()) { + ASSERT(instr->hydrogen()->store_mode() == STORE_TO_INITIALIZED_ENTRY); + ASSERT(instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS); + STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) && + (kSmiTag == 0)); + mem_op = UntagSmiFieldMemOperand(store_base, offset); + } else { + mem_op = FieldMemOperand(store_base, offset); + } } else { store_base = scratch; key = ToRegister(instr->key()); bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); - CalcKeyedArrayBaseRegister(store_base, elements, key, key_is_tagged, - instr->hydrogen()->elements_kind()); - offset = FixedArray::OffsetOfElementAt(instr->additional_index()); - } - Representation representation = instr->hydrogen()->value()->representation(); - if (representation.IsInteger32()) { - ASSERT(instr->hydrogen()->store_mode() == STORE_TO_INITIALIZED_ENTRY); - ASSERT(instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS); - STATIC_ASSERT(kSmiValueSize == 32 && kSmiShift == 32 && kSmiTag == 0); - __ Store(value, UntagSmiFieldMemOperand(store_base, offset), - Representation::Integer32()); - } else { - __ Store(value, FieldMemOperand(store_base, offset), representation); + int offset = FixedArray::OffsetOfElementAt(instr->additional_index()); + + mem_op = PrepareKeyedArrayOperand(store_base, elements, key, key_is_tagged, + instr->hydrogen()->elements_kind(), + representation, offset); } + __ Store(value, mem_op, representation); + if (instr->hydrogen()->NeedsWriteBarrier()) { ASSERT(representation.IsTagged()); // This assignment may cause element_addr to alias store_base. @@ -5229,7 +5271,7 @@ void LCodeGen::DoStoreKeyedFixed(LStoreKeyedFixed* instr) { instr->hydrogen()->value()->IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; // Compute address of modified element and store it into key register. 
- __ Add(element_addr, store_base, offset - kHeapObjectTag); + __ Add(element_addr, mem_op.base(), mem_op.OffsetAsOperand()); __ RecordWrite(elements, element_addr, value, GetLinkRegisterState(), kSaveFPRegs, EMIT_REMEMBERED_SET, check_needed); } @@ -5254,18 +5296,17 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { Register object = ToRegister(instr->object()); HObjectAccess access = instr->hydrogen()->access(); - Handle<Map> transition = instr->transition(); int offset = access.offset(); if (access.IsExternalMemory()) { - ASSERT(transition.is_null()); + ASSERT(!instr->hydrogen()->has_transition()); ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); Register value = ToRegister(instr->value()); __ Store(value, MemOperand(object, offset), representation); return; } else if (representation.IsDouble()) { - ASSERT(transition.is_null()); ASSERT(access.IsInobject()); + ASSERT(!instr->hydrogen()->has_transition()); ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); FPRegister value = ToDoubleRegister(instr->value()); __ Str(value, FieldMemOperand(object, offset)); @@ -5284,11 +5325,13 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { !instr->hydrogen()->value()->type().IsHeapObject()) { DeoptimizeIfSmi(value, instr->environment()); - // We know that value is a smi now, so we can omit the check below. + // We know now that value is not a smi, so we can omit the check below. check_needed = OMIT_SMI_CHECK; } - if (!transition.is_null()) { + if (instr->hydrogen()->has_transition()) { + Handle<Map> transition = instr->hydrogen()->transition_map(); + AddDeprecationDependency(transition); // Store the new map value. Register new_map_value = ToRegister(instr->temp0()); __ Mov(new_map_value, Operand(transition)); @@ -5365,9 +5408,10 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) { ASSERT(ToRegister(instr->context()).is(cp)); ASSERT(ToRegister(instr->left()).Is(x1)); ASSERT(ToRegister(instr->right()).Is(x0)); - StringAddStub stub(instr->hydrogen()->flags(), + StringAddStub stub(isolate(), + instr->hydrogen()->flags(), instr->hydrogen()->pretenure_flag()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -5440,8 +5484,8 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { __ Cmp(char_code, String::kMaxOneByteCharCode); __ B(hi, deferred->entry()); __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex); - __ Add(result, result, Operand(char_code, SXTW, kPointerSizeLog2)); - __ Ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize)); + __ Add(result, result, FixedArray::kHeaderSize - kHeapObjectTag); + __ Ldr(result, MemOperand(result, char_code, SXTW, kPointerSizeLog2)); __ CompareRoot(result, Heap::kUndefinedValueRootIndex); __ B(eq, deferred->entry()); __ Bind(deferred->exit()); @@ -5483,7 +5527,8 @@ void LCodeGen::DoSubI(LSubI* instr) { bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); Register result = ToRegister32(instr->result()); Register left = ToRegister32(instr->left()); - Operand right = ToOperand32I(instr->right()); + Operand right = ToShiftedRightOperand32I(instr->right(), instr); + if (can_overflow) { __ Subs(result, left, right); DeoptimizeIf(vs, instr->environment()); @@ -5557,7 +5602,7 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr, // A heap number: load value and convert to int32 using non-truncating // function. If the result is out of range, branch to deoptimize. 
__ Ldr(dbl_scratch1, FieldMemOperand(input, HeapNumber::kValueOffset)); - __ TryConvertDoubleToInt32(output, dbl_scratch1, dbl_scratch2); + __ TryRepresentDoubleAsInt32(output, dbl_scratch1, dbl_scratch2); DeoptimizeIf(ne, instr->environment()); if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { @@ -5659,7 +5704,6 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { Register object = ToRegister(instr->object()); - Register temp1 = ToRegister(instr->temp1()); Handle<Map> from_map = instr->original_map(); Handle<Map> to_map = instr->transitioned_map(); @@ -5667,26 +5711,34 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { ElementsKind to_kind = instr->to_kind(); Label not_applicable; - __ CheckMap(object, temp1, from_map, &not_applicable, DONT_DO_SMI_CHECK); if (IsSimpleMapChangeTransition(from_kind, to_kind)) { + Register temp1 = ToRegister(instr->temp1()); Register new_map = ToRegister(instr->temp2()); + __ CheckMap(object, temp1, from_map, &not_applicable, DONT_DO_SMI_CHECK); __ Mov(new_map, Operand(to_map)); __ Str(new_map, FieldMemOperand(object, HeapObject::kMapOffset)); // Write barrier. __ RecordWriteField(object, HeapObject::kMapOffset, new_map, temp1, GetLinkRegisterState(), kDontSaveFPRegs); } else { + { + UseScratchRegisterScope temps(masm()); + // Use the temp register only in a restricted scope - the codegen checks + // that we do not use any register across a call. + __ CheckMap(object, temps.AcquireX(), from_map, &not_applicable, + DONT_DO_SMI_CHECK); + } + ASSERT(object.is(x0)); ASSERT(ToRegister(instr->context()).is(cp)); PushSafepointRegistersScope scope( this, Safepoint::kWithRegistersAndDoubles); - __ Mov(x0, object); __ Mov(x1, Operand(to_map)); bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; - TransitionElementsKindStub stub(from_kind, to_kind, is_js_array); + TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); __ CallStub(&stub); RecordSafepointWithRegistersAndDoubles( - instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); + instr->pointer_map(), 0, Safepoint::kLazyDeopt); } __ Bind(&not_applicable); } @@ -5698,8 +5750,8 @@ void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { Register temp2 = ToRegister(instr->temp2()); Label no_memento_found; - __ JumpIfJSArrayHasAllocationMemento(object, temp1, temp2, &no_memento_found); - Deoptimize(instr->environment()); + __ TestJSArrayForAllocationMemento(object, temp1, temp2, &no_memento_found); + DeoptimizeIf(eq, instr->environment()); __ Bind(&no_memento_found); } @@ -5727,7 +5779,8 @@ void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { Label* false_label = instr->FalseLabel(chunk_); Register value = ToRegister(instr->value()); - if (type_name->Equals(heap()->number_string())) { + Factory* factory = isolate()->factory(); + if (String::Equals(type_name, factory->number_string())) { ASSERT(instr->temp1() != NULL); Register map = ToRegister(instr->temp1()); @@ -5736,7 +5789,7 @@ void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); EmitBranch(instr, eq); - } else if (type_name->Equals(heap()->string_string())) { + } else if (String::Equals(type_name, factory->string_string())) { ASSERT((instr->temp1() != NULL) && (instr->temp2() != NULL)); Register map = ToRegister(instr->temp1()); Register scratch = ToRegister(instr->temp2()); @@ -5747,7 +5800,7 @@ void
LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { __ Ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset)); EmitTestAndBranch(instr, eq, scratch, 1 << Map::kIsUndetectable); - } else if (type_name->Equals(heap()->symbol_string())) { + } else if (String::Equals(type_name, factory->symbol_string())) { ASSERT((instr->temp1() != NULL) && (instr->temp2() != NULL)); Register map = ToRegister(instr->temp1()); Register scratch = ToRegister(instr->temp2()); @@ -5756,16 +5809,17 @@ void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { __ CompareObjectType(value, map, scratch, SYMBOL_TYPE); EmitBranch(instr, eq); - } else if (type_name->Equals(heap()->boolean_string())) { + } else if (String::Equals(type_name, factory->boolean_string())) { __ JumpIfRoot(value, Heap::kTrueValueRootIndex, true_label); __ CompareRoot(value, Heap::kFalseValueRootIndex); EmitBranch(instr, eq); - } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) { + } else if (FLAG_harmony_typeof && + String::Equals(type_name, factory->null_string())) { __ CompareRoot(value, Heap::kNullValueRootIndex); EmitBranch(instr, eq); - } else if (type_name->Equals(heap()->undefined_string())) { + } else if (String::Equals(type_name, factory->undefined_string())) { ASSERT(instr->temp1() != NULL); Register scratch = ToRegister(instr->temp1()); @@ -5776,7 +5830,7 @@ void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { __ Ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset)); EmitTestAndBranch(instr, ne, scratch, 1 << Map::kIsUndetectable); - } else if (type_name->Equals(heap()->function_string())) { + } else if (String::Equals(type_name, factory->function_string())) { STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); ASSERT(instr->temp1() != NULL); Register type = ToRegister(instr->temp1()); @@ -5786,7 +5840,7 @@ void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { // HeapObject's type has been loaded into type register by JumpIfObjectType. EmitCompareAndBranch(instr, eq, type, JS_FUNCTION_PROXY_TYPE); - } else if (type_name->Equals(heap()->object_string())) { + } else if (String::Equals(type_name, factory->object_string())) { ASSERT((instr->temp1() != NULL) && (instr->temp2() != NULL)); Register map = ToRegister(instr->temp1()); Register scratch = ToRegister(instr->temp2()); @@ -5832,7 +5886,7 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { // If the receiver is null or undefined, we have to pass the global object as // a receiver to normal functions. Values have to be passed unchanged to // builtins and strict-mode functions. - Label global_object, done, deopt; + Label global_object, done, copy_receiver; if (!instr->hydrogen()->known_function()) { __ Ldr(result, FieldMemOperand(function, @@ -5843,10 +5897,10 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { FieldMemOperand(result, SharedFunctionInfo::kCompilerHintsOffset)); // Do not transform the receiver to object for strict mode functions. - __ Tbnz(result, SharedFunctionInfo::kStrictModeFunction, &done); + __ Tbnz(result, SharedFunctionInfo::kStrictModeFunction, &copy_receiver); // Do not transform the receiver to object for builtins. - __ Tbnz(result, SharedFunctionInfo::kNative, &done); + __ Tbnz(result, SharedFunctionInfo::kNative, &copy_receiver); } // Normal function. Replace undefined or null with global receiver.
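DoWrapReceiver, whose second hunk follows, implements the sloppy-mode receiver rules: undefined and null become the global receiver, strict-mode functions and builtins keep the receiver untouched, JS objects pass through, and any other value (a smi or other primitive) deoptimizes to the generic call path. A standalone sketch of that decision (illustration only, not V8 code):

#include <cstdio>

enum class Receiver { kUndefinedOrNull, kOtherPrimitive, kJSObject };

const char* wrapped_receiver(Receiver r, bool strict_or_native) {
  if (strict_or_native) return "unchanged";
  switch (r) {
    case Receiver::kUndefinedOrNull: return "global receiver";
    case Receiver::kJSObject:        return "unchanged";
    default:                         return "deoptimize";
  }
}

int main() {
  std::printf("%s\n", wrapped_receiver(Receiver::kUndefinedOrNull, false));  // global receiver
  std::printf("%s\n", wrapped_receiver(Receiver::kUndefinedOrNull, true));   // unchanged
  std::printf("%s\n", wrapped_receiver(Receiver::kOtherPrimitive, false));   // deoptimize
  return 0;
}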
@@ -5854,32 +5908,78 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, &global_object); // Deoptimize if the receiver is not a JS object. - __ JumpIfSmi(receiver, &deopt); + DeoptimizeIfSmi(receiver, instr->environment()); __ CompareObjectType(receiver, result, result, FIRST_SPEC_OBJECT_TYPE); - __ Mov(result, receiver); - __ B(ge, &done); - // Otherwise, fall through to deopt. - - __ Bind(&deopt); + __ B(ge, &copy_receiver); Deoptimize(instr->environment()); __ Bind(&global_object); __ Ldr(result, FieldMemOperand(function, JSFunction::kContextOffset)); __ Ldr(result, ContextMemOperand(result, Context::GLOBAL_OBJECT_INDEX)); __ Ldr(result, FieldMemOperand(result, GlobalObject::kGlobalReceiverOffset)); + __ B(&done); + __ Bind(&copy_receiver); + __ Mov(result, receiver); __ Bind(&done); } +void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, + Register result, + Register object, + Register index) { + PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); + __ Push(object); + __ Push(index); + __ Mov(cp, 0); + __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); + RecordSafepointWithRegisters( + instr->pointer_map(), 2, Safepoint::kNoLazyDeopt); + __ StoreToSafepointRegisterSlot(x0, result); +} + + void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { + class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode { + public: + DeferredLoadMutableDouble(LCodeGen* codegen, + LLoadFieldByIndex* instr, + Register result, + Register object, + Register index) + : LDeferredCode(codegen), + instr_(instr), + result_(result), + object_(object), + index_(index) { + } + virtual void Generate() V8_OVERRIDE { + codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_); + } + virtual LInstruction* instr() V8_OVERRIDE { return instr_; } + private: + LLoadFieldByIndex* instr_; + Register result_; + Register object_; + Register index_; + }; Register object = ToRegister(instr->object()); Register index = ToRegister(instr->index()); Register result = ToRegister(instr->result()); __ AssertSmi(index); + DeferredLoadMutableDouble* deferred; + deferred = new(zone()) DeferredLoadMutableDouble( + this, instr, result, object, index); + Label out_of_object, done; + + __ TestAndBranchIfAnySet( + index, reinterpret_cast<uint64_t>(Smi::FromInt(1)), deferred->entry()); + __ Mov(index, Operand(index, ASR, 1)); + __ Cmp(index, Smi::FromInt(0)); __ B(lt, &out_of_object); @@ -5895,6 +5995,7 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); __ Ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize - kPointerSize)); + __ Bind(deferred->exit()); __ Bind(&done); } diff --git a/deps/v8/src/arm64/lithium-codegen-arm64.h b/deps/v8/src/arm64/lithium-codegen-arm64.h index b1d8b70d5..8c25e6340 100644 --- a/deps/v8/src/arm64/lithium-codegen-arm64.h +++ b/deps/v8/src/arm64/lithium-codegen-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ #define V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ @@ -35,7 +12,7 @@ #include "lithium-codegen.h" #include "safepoint-table.h" #include "scopes.h" -#include "v8utils.h" +#include "utils.h" namespace v8 { namespace internal { @@ -60,10 +37,16 @@ class LCodeGen: public LCodeGenBase { frame_is_built_(false), safepoints_(info->zone()), resolver_(this), - expected_safepoint_kind_(Safepoint::kSimple) { + expected_safepoint_kind_(Safepoint::kSimple), + after_push_argument_(false), + inlined_arguments_(false) { PopulateDeoptimizationLiteralsWithInlinedFunctions(); } + ~LCodeGen() { + ASSERT(!after_push_argument_ || inlined_arguments_); + } + // Simple accessors. Scope* scope() const { return scope_; } @@ -98,6 +81,7 @@ class LCodeGen: public LCodeGenBase { // information on it. void FinishCode(Handle<Code> code); + enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; // Support for converting LOperands to assembler types. // LOperand must be a register. Register ToRegister(LOperand* op) const; @@ -105,9 +89,30 @@ class LCodeGen: public LCodeGenBase { Operand ToOperand(LOperand* op); Operand ToOperand32I(LOperand* op); Operand ToOperand32U(LOperand* op); - MemOperand ToMemOperand(LOperand* op) const; + enum StackMode { kMustUseFramePointer, kCanUseStackPointer }; + MemOperand ToMemOperand(LOperand* op, + StackMode stack_mode = kCanUseStackPointer) const; Handle<Object> ToHandle(LConstantOperand* op) const; + template<class LI> + Operand ToShiftedRightOperand32I(LOperand* right, + LI* shift_info) { + return ToShiftedRightOperand32(right, shift_info, SIGNED_INT32); + } + template<class LI> + Operand ToShiftedRightOperand32U(LOperand* right, + LI* shift_info) { + return ToShiftedRightOperand32(right, shift_info, UNSIGNED_INT32); + } + template<class LI> + Operand ToShiftedRightOperand32(LOperand* right, + LI* shift_info, + IntegerSignedness signedness); + + int JSShiftAmountFromLConstant(LOperand* constant) { + return ToInteger32(LConstantOperand::cast(constant)) & 0x1f; + } + // TODO(jbramley): Examine these helpers and check that they make sense. // IsInteger32Constant returns true for smi constants, for example. 
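The JSShiftAmountFromLConstant helper added above encodes the ECMAScript rule that shift counts are taken modulo 32, so a constant count of 33 has to generate the same code as a count of 1. A standalone sketch (illustration only):

#include <cstdint>
#include <cstdio>

int32_t js_shl(int32_t value, int32_t count) {
  return value << (count & 0x1f);  // JavaScript: the count is masked to 5 bits
}

int main() {
  std::printf("%d\n", js_shl(1, 33));  // 2, same as 1 << 1
  std::printf("%d\n", js_shl(5, 32));  // 5, same as 5 << 0
  return 0;
}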
bool IsInteger32Constant(LConstantOperand* op) const; @@ -137,7 +142,6 @@ class LCodeGen: public LCodeGenBase { Label* exit, Label* allocation_entry); - enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; void DoDeferredNumberTagU(LInstruction* instr, LOperand* value, LOperand* temp1, @@ -149,6 +153,10 @@ class LCodeGen: public LCodeGenBase { void DoDeferredAllocate(LAllocate* instr); void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr); void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); + void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, + Register result, + Register object, + Register index); Operand ToOperand32(LOperand* op, IntegerSignedness signedness); @@ -224,7 +232,7 @@ class LCodeGen: public LCodeGenBase { Deoptimizer::BailoutType* override_bailout_type = NULL); void Deoptimize(LEnvironment* environment, Deoptimizer::BailoutType* override_bailout_type = NULL); - void DeoptimizeIf(Condition cc, LEnvironment* environment); + void DeoptimizeIf(Condition cond, LEnvironment* environment); void DeoptimizeIfZero(Register rt, LEnvironment* environment); void DeoptimizeIfNotZero(Register rt, LEnvironment* environment); void DeoptimizeIfNegative(Register rt, LEnvironment* environment); @@ -239,7 +247,6 @@ class LCodeGen: public LCodeGenBase { void DeoptimizeIfMinusZero(DoubleRegister input, LEnvironment* environment); void DeoptimizeIfBitSet(Register rt, int bit, LEnvironment* environment); void DeoptimizeIfBitClear(Register rt, int bit, LEnvironment* environment); - void ApplyCheckIf(Condition cc, LBoundsCheck* check); MemOperand PrepareKeyedExternalArrayOperand(Register key, Register base, @@ -249,19 +256,19 @@ class LCodeGen: public LCodeGenBase { int constant_key, ElementsKind elements_kind, int additional_index); - void CalcKeyedArrayBaseRegister(Register base, - Register elements, - Register key, - bool key_is_tagged, - ElementsKind elements_kind); + MemOperand PrepareKeyedArrayOperand(Register base, + Register elements, + Register key, + bool key_is_tagged, + ElementsKind elements_kind, + Representation representation, + int additional_index); void RegisterEnvironmentForDeoptimization(LEnvironment* environment, Safepoint::DeoptMode mode); int GetStackSlotCount() const { return chunk()->spill_slot_count(); } - void Abort(BailoutReason reason); - void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } // Emit frame translation commands for an environment. @@ -368,6 +375,15 @@ class LCodeGen: public LCodeGenBase { Safepoint::Kind expected_safepoint_kind_; + // This flag is true when we are after a push (but before a call). + // In this situation, jssp no longer references the end of the stack slots so, + // we can only reference a stack slot via fp. + bool after_push_argument_; + // If we have inlined arguments, we are no longer able to use jssp because + // jssp is modified and we never know if we are in a block after or before + // the pop of the arguments (which restores jssp). 
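The comment above, together with the new StackMode parameter on ToMemOperand, tracks a single invariant: once arguments have been pushed onto jssp (and while inlined arguments make it unclear whether the matching pop has happened yet), spill slots can no longer be addressed relative to the stack pointer and must be reached through fp. A hypothetical sketch of that choice (illustration only, not the V8 API; the helper name below is made up):

#include <cstdio>

// Pick the base register for a spill-slot access under the invariant above.
const char* spill_slot_base(bool after_push_argument, bool inlined_arguments) {
  if (after_push_argument || inlined_arguments) return "fp";  // kMustUseFramePointer
  return "jssp";                                              // kCanUseStackPointer
}

int main() {
  std::printf("%s\n", spill_slot_base(false, false));  // jssp
  std::printf("%s\n", spill_slot_base(true, false));   // fp
  return 0;
}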
+ bool inlined_arguments_; + int old_position_; class PushSafepointRegistersScope BASE_EMBEDDED { @@ -387,12 +403,12 @@ class LCodeGen: public LCodeGenBase { codegen_->masm_->Mov(to_be_pushed_lr, lr); switch (codegen_->expected_safepoint_kind_) { case Safepoint::kWithRegisters: { - StoreRegistersStateStub stub(kDontSaveFPRegs); + StoreRegistersStateStub stub(codegen_->isolate(), kDontSaveFPRegs); codegen_->masm_->CallStub(&stub); break; } case Safepoint::kWithRegistersAndDoubles: { - StoreRegistersStateStub stub(kSaveFPRegs); + StoreRegistersStateStub stub(codegen_->isolate(), kSaveFPRegs); codegen_->masm_->CallStub(&stub); break; } @@ -406,12 +422,12 @@ class LCodeGen: public LCodeGenBase { ASSERT((kind & Safepoint::kWithRegisters) != 0); switch (kind) { case Safepoint::kWithRegisters: { - RestoreRegistersStateStub stub(kDontSaveFPRegs); + RestoreRegistersStateStub stub(codegen_->isolate(), kDontSaveFPRegs); codegen_->masm_->CallStub(&stub); break; } case Safepoint::kWithRegistersAndDoubles: { - RestoreRegistersStateStub stub(kSaveFPRegs); + RestoreRegistersStateStub stub(codegen_->isolate(), kSaveFPRegs); codegen_->masm_->CallStub(&stub); break; } diff --git a/deps/v8/src/arm64/lithium-gap-resolver-arm64.cc b/deps/v8/src/arm64/lithium-gap-resolver-arm64.cc index f0a2e6bd0..c721cb48a 100644 --- a/deps/v8/src/arm64/lithium-gap-resolver-arm64.cc +++ b/deps/v8/src/arm64/lithium-gap-resolver-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/arm64/lithium-gap-resolver-arm64.h b/deps/v8/src/arm64/lithium-gap-resolver-arm64.h index d1637b65a..ae6719073 100644 --- a/deps/v8/src/arm64/lithium-gap-resolver-arm64.h +++ b/deps/v8/src/arm64/lithium-gap-resolver-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_LITHIUM_GAP_RESOLVER_ARM64_H_ #define V8_ARM64_LITHIUM_GAP_RESOLVER_ARM64_H_ diff --git a/deps/v8/src/arm64/macro-assembler-arm64-inl.h b/deps/v8/src/arm64/macro-assembler-arm64-inl.h index d660d3601..7c9258a9c 100644 --- a/deps/v8/src/arm64/macro-assembler-arm64-inl.h +++ b/deps/v8/src/arm64/macro-assembler-arm64-inl.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_ #define V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_ @@ -319,13 +296,6 @@ LS_MACRO_LIST(DEFINE_FUNCTION) #undef DEFINE_FUNCTION -void MacroAssembler::Adr(const Register& rd, Label* label) { - ASSERT(allow_macro_instructions_); - ASSERT(!rd.IsZero()); - adr(rd, label); -} - - void MacroAssembler::Asr(const Register& rd, const Register& rn, unsigned shift) { @@ -833,6 +803,12 @@ void MacroAssembler::Frinta(const FPRegister& fd, const FPRegister& fn) { } +void MacroAssembler::Frintm(const FPRegister& fd, const FPRegister& fn) { + ASSERT(allow_macro_instructions_); + frintm(fd, fn); +} + + void MacroAssembler::Frintn(const FPRegister& fd, const FPRegister& fn) { ASSERT(allow_macro_instructions_); frintn(fd, fn); @@ -1001,7 +977,6 @@ void MacroAssembler::Mrs(const Register& rt, SystemRegister sysreg) { void MacroAssembler::Msr(SystemRegister sysreg, const Register& rt) { ASSERT(allow_macro_instructions_); - ASSERT(!rt.IsZero()); msr(sysreg, rt); } @@ -1409,6 +1384,30 @@ void MacroAssembler::JumpIfBothNotSmi(Register value1, } +void MacroAssembler::ObjectTag(Register tagged_obj, Register obj) { + STATIC_ASSERT(kHeapObjectTag == 1); + if (emit_debug_code()) { + Label ok; + Tbz(obj, 0, &ok); + Abort(kObjectTagged); + Bind(&ok); + } + Orr(tagged_obj, obj, kHeapObjectTag); +} + + +void MacroAssembler::ObjectUntag(Register untagged_obj, Register obj) { + STATIC_ASSERT(kHeapObjectTag == 1); + if (emit_debug_code()) { + Label ok; + Tbnz(obj, 0, &ok); + Abort(kObjectNotTagged); + Bind(&ok); + } + Bic(untagged_obj, obj, kHeapObjectTag); +} + + void MacroAssembler::IsObjectNameType(Register object, Register type, Label* fail) { @@ -1489,12 +1488,9 @@ void MacroAssembler::Claim(uint64_t count, uint64_t unit_size) { void MacroAssembler::Claim(const Register& count, uint64_t unit_size) { + if (unit_size == 0) return; ASSERT(IsPowerOf2(unit_size)); - if (unit_size == 0) { - return; - } - const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits); const Operand size(count, LSL, shift); @@ -1511,7 +1507,7 @@ void MacroAssembler::Claim(const Register& count, uint64_t unit_size) { void MacroAssembler::ClaimBySMI(const Register& count_smi, uint64_t unit_size) { - ASSERT(IsPowerOf2(unit_size)); + ASSERT(unit_size == 0 || IsPowerOf2(unit_size)); const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits) - kSmiShift; const Operand size(count_smi, (shift >= 0) ? (LSL) : (LSR), @@ -1550,12 +1546,9 @@ void MacroAssembler::Drop(uint64_t count, uint64_t unit_size) { void MacroAssembler::Drop(const Register& count, uint64_t unit_size) { + if (unit_size == 0) return; ASSERT(IsPowerOf2(unit_size)); - if (unit_size == 0) { - return; - } - const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits); const Operand size(count, LSL, shift); @@ -1575,7 +1568,7 @@ void MacroAssembler::Drop(const Register& count, uint64_t unit_size) { void MacroAssembler::DropBySMI(const Register& count_smi, uint64_t unit_size) { - ASSERT(IsPowerOf2(unit_size)); + ASSERT(unit_size == 0 || IsPowerOf2(unit_size)); const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits) - kSmiShift; const Operand size(count_smi, (shift >= 0) ? 
(LSL) : (LSR), diff --git a/deps/v8/src/arm64/macro-assembler-arm64.cc b/deps/v8/src/arm64/macro-assembler-arm64.cc index 08ddb8782..c5ce99be9 100644 --- a/deps/v8/src/arm64/macro-assembler-arm64.cc +++ b/deps/v8/src/arm64/macro-assembler-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -53,7 +30,9 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, #endif has_frame_(false), use_real_aborts_(true), - sp_(jssp), tmp_list_(ip0, ip1), fptmp_list_(fp_scratch) { + sp_(jssp), + tmp_list_(DefaultTmpList()), + fptmp_list_(DefaultFPTmpList()) { if (isolate() != NULL) { code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), isolate()); @@ -61,13 +40,23 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, } +CPURegList MacroAssembler::DefaultTmpList() { + return CPURegList(ip0, ip1); +} + + +CPURegList MacroAssembler::DefaultFPTmpList() { + return CPURegList(fp_scratch1, fp_scratch2); +} + + void MacroAssembler::LogicalMacro(const Register& rd, const Register& rn, const Operand& operand, LogicalOp op) { UseScratchRegisterScope temps(this); - if (operand.NeedsRelocation()) { + if (operand.NeedsRelocation(isolate())) { Register temp = temps.AcquireX(); LoadRelocated(temp, operand); Logical(rd, rn, temp, op); @@ -258,7 +247,7 @@ void MacroAssembler::Mov(const Register& rd, UseScratchRegisterScope temps(this); Register dst = (rd.IsSP()) ? 
temps.AcquireSameSizeAs(rd) : rd; - if (operand.NeedsRelocation()) { + if (operand.NeedsRelocation(isolate())) { LoadRelocated(dst, operand); } else if (operand.IsImmediate()) { @@ -306,7 +295,7 @@ void MacroAssembler::Mov(const Register& rd, void MacroAssembler::Mvn(const Register& rd, const Operand& operand) { ASSERT(allow_macro_instructions_); - if (operand.NeedsRelocation()) { + if (operand.NeedsRelocation(isolate())) { LoadRelocated(rd, operand); mvn(rd, rd); @@ -361,7 +350,7 @@ void MacroAssembler::ConditionalCompareMacro(const Register& rn, Condition cond, ConditionalCompareOp op) { ASSERT((cond != al) && (cond != nv)); - if (operand.NeedsRelocation()) { + if (operand.NeedsRelocation(isolate())) { UseScratchRegisterScope temps(this); Register temp = temps.AcquireX(); LoadRelocated(temp, operand); @@ -427,12 +416,12 @@ void MacroAssembler::AddSubMacro(const Register& rd, FlagsUpdate S, AddSubOp op) { if (operand.IsZero() && rd.Is(rn) && rd.Is64Bits() && rn.Is64Bits() && - !operand.NeedsRelocation() && (S == LeaveFlags)) { + !operand.NeedsRelocation(isolate()) && (S == LeaveFlags)) { // The instruction would be a nop. Avoid generating useless code. return; } - if (operand.NeedsRelocation()) { + if (operand.NeedsRelocation(isolate())) { UseScratchRegisterScope temps(this); Register temp = temps.AcquireX(); LoadRelocated(temp, operand); @@ -458,7 +447,7 @@ void MacroAssembler::AddSubWithCarryMacro(const Register& rd, ASSERT(rd.SizeInBits() == rn.SizeInBits()); UseScratchRegisterScope temps(this); - if (operand.NeedsRelocation()) { + if (operand.NeedsRelocation(isolate())) { Register temp = temps.AcquireX(); LoadRelocated(temp, operand); AddSubWithCarryMacro(rd, rn, temp, S, op); @@ -599,6 +588,43 @@ bool MacroAssembler::NeedExtraInstructionsOrRegisterBranch( } +void MacroAssembler::Adr(const Register& rd, Label* label, AdrHint hint) { + ASSERT(allow_macro_instructions_); + ASSERT(!rd.IsZero()); + + if (hint == kAdrNear) { + adr(rd, label); + return; + } + + ASSERT(hint == kAdrFar); + UseScratchRegisterScope temps(this); + Register scratch = temps.AcquireX(); + ASSERT(!AreAliased(rd, scratch)); + + if (label->is_bound()) { + int label_offset = label->pos() - pc_offset(); + if (Instruction::IsValidPCRelOffset(label_offset)) { + adr(rd, label); + } else { + ASSERT(label_offset <= 0); + int min_adr_offset = -(1 << (Instruction::ImmPCRelRangeBitwidth - 1)); + adr(rd, min_adr_offset); + Add(rd, rd, label_offset - min_adr_offset); + } + } else { + InstructionAccurateScope scope( + this, PatchingAssembler::kAdrFarPatchableNInstrs); + adr(rd, label); + for (int i = 0; i < PatchingAssembler::kAdrFarPatchableNNops; ++i) { + nop(ADR_FAR_NOP); + } + movz(scratch, 0); + add(rd, rd, scratch); + } +} + + void MacroAssembler::B(Label* label, BranchType type, Register reg, int bit) { ASSERT((reg.Is(NoReg) || type >= kBranchTypeFirstUsingReg) && (bit == -1 || type >= kBranchTypeFirstUsingBit)); @@ -1196,7 +1222,65 @@ void MacroAssembler::AssertStackConsistency() { } -void MacroAssembler::LoadRoot(Register destination, +void MacroAssembler::AssertFPCRState(Register fpcr) { + if (emit_debug_code()) { + Label unexpected_mode, done; + UseScratchRegisterScope temps(this); + if (fpcr.IsNone()) { + fpcr = temps.AcquireX(); + Mrs(fpcr, FPCR); + } + + // Settings overridden by ConfiugreFPCR(): + // - Assert that default-NaN mode is set. + Tbz(fpcr, DN_offset, &unexpected_mode); + + // Settings left to their default values: + // - Assert that flush-to-zero is not set. 
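  // (Annotation, not part of this patch: in FPCR, DN is the default-NaN
  //  control, FZ is flush-to-zero and RMode holds the two rounding-mode bits.
  //  The ConfigureFPCR() helper added below only ever writes DN, so the other
  //  fields are expected to still hold their reset values when asserted here.)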
+ Tbnz(fpcr, FZ_offset, &unexpected_mode); + // - Assert that the rounding mode is nearest-with-ties-to-even. + STATIC_ASSERT(FPTieEven == 0); + Tst(fpcr, RMode_mask); + B(eq, &done); + + Bind(&unexpected_mode); + Abort(kUnexpectedFPCRMode); + + Bind(&done); + } +} + + +void MacroAssembler::ConfigureFPCR() { + UseScratchRegisterScope temps(this); + Register fpcr = temps.AcquireX(); + Mrs(fpcr, FPCR); + + // If necessary, enable default-NaN mode. The default values of the other FPCR + // options should be suitable, and AssertFPCRState will verify that. + Label no_write_required; + Tbnz(fpcr, DN_offset, &no_write_required); + + Orr(fpcr, fpcr, DN_mask); + Msr(FPCR, fpcr); + + Bind(&no_write_required); + AssertFPCRState(fpcr); +} + + +void MacroAssembler::CanonicalizeNaN(const FPRegister& dst, + const FPRegister& src) { + AssertFPCRState(); + + // With DN=1 and RMode=FPTieEven, subtracting 0.0 preserves all inputs except + // for NaNs, which become the default NaN. We use fsub rather than fadd + // because sub preserves -0.0 inputs: -0.0 + 0.0 = 0.0, but -0.0 - 0.0 = -0.0. + Fsub(dst, src, fp_zero); +} + + +void MacroAssembler::LoadRoot(CPURegister destination, Heap::RootListIndex index) { // TODO(jbramley): Most root values are constants, and can be synthesized // without a load. Refer to the ARM back end for details. @@ -1488,9 +1572,9 @@ void MacroAssembler::Throw(BailoutReason reason) { } -void MacroAssembler::ThrowIf(Condition cc, BailoutReason reason) { +void MacroAssembler::ThrowIf(Condition cond, BailoutReason reason) { Label ok; - B(InvertCondition(cc), &ok); + B(InvertCondition(cond), &ok); Throw(reason); Bind(&ok); } @@ -1572,12 +1656,12 @@ void MacroAssembler::AssertString(Register object) { void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) { ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs. - Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id); + Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); } void MacroAssembler::TailCallStub(CodeStub* stub) { - Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET); + Jump(stub->GetCode(), RelocInfo::CODE_TARGET); } @@ -1589,20 +1673,13 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // Check that the number of arguments matches what the function expects. // If f->nargs is -1, the function can accept a variable number of arguments. - if (f->nargs >= 0 && f->nargs != num_arguments) { - // Illegal operation: drop the stack arguments and return undefined. - if (num_arguments > 0) { - Drop(num_arguments); - } - LoadRoot(x0, Heap::kUndefinedValueRootIndex); - return; - } + CHECK(f->nargs < 0 || f->nargs == num_arguments); // Place the necessary arguments. Mov(x0, num_arguments); Mov(x1, ExternalReference(f, isolate())); - CEntryStub stub(1, save_doubles); + CEntryStub stub(isolate(), 1, save_doubles); CallStub(&stub); } @@ -1634,9 +1711,7 @@ void MacroAssembler::CallApiFunctionAndReturn( Label profiler_disabled; Label end_profiler_check; - bool* is_profiling_flag = isolate()->cpu_profiler()->is_profiling_address(); - STATIC_ASSERT(sizeof(*is_profiling_flag) == 1); - Mov(x10, reinterpret_cast<uintptr_t>(is_profiling_flag)); + Mov(x10, ExternalReference::is_profiling_address(isolate())); Ldrb(w10, MemOperand(x10)); Cbz(w10, &profiler_disabled); Mov(x3, thunk_ref); @@ -1680,7 +1755,7 @@ void MacroAssembler::CallApiFunctionAndReturn( // Native call returns to the DirectCEntry stub which redirects to the // return address pushed on stack (could have moved after GC). 
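  // (Annotation, not part of this patch: code stubs in this V8 roll take the
  //  Isolate in their constructor and GetCode() drops its isolate argument,
  //  which is why DirectCEntryStub, CEntryStub and the other *Stub
  //  instantiations in this file gain an isolate() parameter.)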
// DirectCEntry stub itself is generated early and never moves. - DirectCEntryStub stub; + DirectCEntryStub stub(isolate()); stub.GenerateCall(this, x3); if (FLAG_log_timer_events) { @@ -1764,15 +1839,15 @@ void MacroAssembler::CallExternalReference(const ExternalReference& ext, Mov(x0, num_arguments); Mov(x1, ext); - CEntryStub stub(1); + CEntryStub stub(isolate(), 1); CallStub(&stub); } void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { Mov(x1, builtin); - CEntryStub stub(1); - Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET); + CEntryStub stub(isolate(), 1); + Jump(stub.GetCode(), RelocInfo::CODE_TARGET); } @@ -2237,11 +2312,11 @@ void MacroAssembler::LookupNumberStringCache(Register object, } -void MacroAssembler::TryConvertDoubleToInt(Register as_int, - FPRegister value, - FPRegister scratch_d, - Label* on_successful_conversion, - Label* on_failed_conversion) { +void MacroAssembler::TryRepresentDoubleAsInt(Register as_int, + FPRegister value, + FPRegister scratch_d, + Label* on_successful_conversion, + Label* on_failed_conversion) { // Convert to an int and back again, then compare with the original value. Fcvtzs(as_int, value); Scvtf(scratch_d, as_int); @@ -2870,7 +2945,8 @@ void MacroAssembler::TruncateDoubleToI(Register result, Push(lr); Push(double_input); // Put input on stack. - DoubleToIStub stub(jssp, + DoubleToIStub stub(isolate(), + jssp, result, 0, true, // is_truncating @@ -2898,7 +2974,8 @@ void MacroAssembler::TruncateHeapNumberToI(Register result, // If we fell through then inline version didn't succeed - call stub instead. Push(lr); - DoubleToIStub stub(object, + DoubleToIStub stub(isolate(), + object, result, HeapNumber::kValueOffset - kHeapObjectTag, true, // is_truncating @@ -3135,15 +3212,13 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) { } -#ifdef ENABLE_DEBUGGER_SUPPORT void MacroAssembler::DebugBreak() { Mov(x0, 0); Mov(x1, ExternalReference(Runtime::kDebugBreak, isolate())); - CEntryStub ces(1); + CEntryStub ces(isolate(), 1); ASSERT(AllowThisStubCall(&ces)); - Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK); + Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); } -#endif void MacroAssembler::PushTryHandler(StackHandler::Kind kind, @@ -3260,14 +3335,13 @@ void MacroAssembler::Allocate(int object_size, // Calculate new top and bail out if new space is exhausted. Adds(scratch3, result, object_size); - B(vs, gc_required); - Cmp(scratch3, allocation_limit); + Ccmp(scratch3, allocation_limit, CFlag, cc); B(hi, gc_required); Str(scratch3, MemOperand(top_address)); // Tag the object if requested. if ((flags & TAG_OBJECT) != 0) { - Orr(result, result, kHeapObjectTag); + ObjectTag(result, result); } } @@ -3342,14 +3416,13 @@ void MacroAssembler::Allocate(Register object_size, Check(eq, kUnalignedAllocationInNewSpace); } - B(vs, gc_required); - Cmp(scratch3, allocation_limit); + Ccmp(scratch3, allocation_limit, CFlag, cc); B(hi, gc_required); Str(scratch3, MemOperand(top_address)); // Tag the object if requested. if ((flags & TAG_OBJECT) != 0) { - Orr(result, result, kHeapObjectTag); + ObjectTag(result, result); } } @@ -3533,32 +3606,50 @@ void MacroAssembler::AllocateHeapNumber(Register result, Label* gc_required, Register scratch1, Register scratch2, - Register heap_number_map) { + CPURegister value, + CPURegister heap_number_map) { + ASSERT(!value.IsValid() || value.Is64Bits()); + UseScratchRegisterScope temps(this); + // Allocate an object in the heap for the heap number and tag it as a heap // object. 
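  // (Annotation, not part of this patch: the allocation below now passes
  //  NO_ALLOCATION_FLAGS and the result is tagged explicitly with ObjectTag()
  //  at the end, so the map word and the double payload can first be written
  //  at plain untagged offsets, ideally as a single Stp of both.)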
Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, - TAG_OBJECT); - - // Store heap number map in the allocated object. - if (heap_number_map.Is(NoReg)) { - heap_number_map = scratch1; + NO_ALLOCATION_FLAGS); + + // Prepare the heap number map. + if (!heap_number_map.IsValid()) { + // If we have a valid value register, use the same type of register to store + // the map so we can use STP to store both in one instruction. + if (value.IsValid() && value.IsFPRegister()) { + heap_number_map = temps.AcquireD(); + } else { + heap_number_map = scratch1; + } LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); } - AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); - Str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); -} - + if (emit_debug_code()) { + Register map; + if (heap_number_map.IsFPRegister()) { + map = scratch1; + Fmov(map, DoubleRegister(heap_number_map)); + } else { + map = Register(heap_number_map); + } + AssertRegisterIsRoot(map, Heap::kHeapNumberMapRootIndex); + } -void MacroAssembler::AllocateHeapNumberWithValue(Register result, - DoubleRegister value, - Label* gc_required, - Register scratch1, - Register scratch2, - Register heap_number_map) { - // TODO(all): Check if it would be more efficient to use STP to store both - // the map and the value. - AllocateHeapNumber(result, gc_required, scratch1, scratch2, heap_number_map); - Str(value, FieldMemOperand(result, HeapNumber::kValueOffset)); + // Store the heap number map and the value in the allocated object. + if (value.IsSameSizeAndType(heap_number_map)) { + STATIC_ASSERT(HeapObject::kMapOffset + kPointerSize == + HeapNumber::kValueOffset); + Stp(heap_number_map, value, MemOperand(result, HeapObject::kMapOffset)); + } else { + Str(heap_number_map, MemOperand(result, HeapObject::kMapOffset)); + if (value.IsValid()) { + Str(value, MemOperand(result, HeapNumber::kValueOffset)); + } + } + ObjectTag(result, result); } @@ -3848,7 +3939,6 @@ void MacroAssembler::StoreNumberToDoubleElements(Register value_reg, Register elements_reg, Register scratch1, FPRegister fpscratch1, - FPRegister fpscratch2, Label* fail, int elements_offset) { ASSERT(!AreAliased(value_reg, key_reg, elements_reg, scratch1)); @@ -3866,12 +3956,9 @@ void MacroAssembler::StoreNumberToDoubleElements(Register value_reg, fail, DONT_DO_SMI_CHECK); Ldr(fpscratch1, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); - Fmov(fpscratch2, FixedDoubleArray::canonical_not_the_hole_nan_as_double()); - // Check for NaN by comparing the number to itself: NaN comparison will - // report unordered, indicated by the overflow flag being set. - Fcmp(fpscratch1, fpscratch1); - Fcsel(fpscratch1, fpscratch2, fpscratch1, vs); + // Canonicalize NaNs. + CanonicalizeNaN(fpscratch1); // Store the result. Bind(&store_num); @@ -4136,7 +4223,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests. 
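  // (Annotation, not part of this patch: in StoreNumberToDoubleElements above,
  //  the explicit Fcmp/Fcsel NaN check is replaced by CanonicalizeNaN(), which
  //  relies on the default-NaN FPCR mode and no longer needs the second FP
  //  scratch register that the old sequence required.)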
Bind(&store_buffer_overflow); Push(lr); StoreBufferOverflowStub store_buffer_overflow_stub = - StoreBufferOverflowStub(fp_mode); + StoreBufferOverflowStub(isolate(), fp_mode); CallStub(&store_buffer_overflow_stub); Pop(lr); @@ -4329,7 +4416,8 @@ void MacroAssembler::RecordWrite(Register object, if (lr_status == kLRHasNotBeenSaved) { Push(lr); } - RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode); + RecordWriteStub stub(isolate(), object, value, address, remembered_set_action, + fp_mode); CallStub(&stub); if (lr_status == kLRHasNotBeenSaved) { Pop(lr); @@ -4674,8 +4762,7 @@ void MacroAssembler::Abort(BailoutReason reason) { // We need some scratch registers for the MacroAssembler, so make sure we have // some. This is safe here because Abort never returns. RegList old_tmp_list = TmpList()->list(); - TmpList()->Combine(ip0); - TmpList()->Combine(ip1); + TmpList()->Combine(MacroAssembler::DefaultTmpList()); if (use_real_aborts()) { // Avoid infinite recursion; Push contains some assertions that use Abort. @@ -4980,7 +5067,8 @@ void MacroAssembler::EmitFrameSetupForCodeAgePatching() { // TODO(jbramley): Other architectures use the internal memcpy to copy the // sequence. If this is a performance bottleneck, we should consider caching // the sequence and copying it in the same way. - InstructionAccurateScope scope(this, kCodeAgeSequenceSize / kInstructionSize); + InstructionAccurateScope scope(this, + kNoCodeAgeSequenceLength / kInstructionSize); ASSERT(jssp.Is(StackPointer())); EmitFrameSetupForCodeAgePatching(this); } @@ -4988,7 +5076,8 @@ void MacroAssembler::EmitFrameSetupForCodeAgePatching() { void MacroAssembler::EmitCodeAgeSequence(Code* stub) { - InstructionAccurateScope scope(this, kCodeAgeSequenceSize / kInstructionSize); + InstructionAccurateScope scope(this, + kNoCodeAgeSequenceLength / kInstructionSize); ASSERT(jssp.Is(StackPointer())); EmitCodeAgeSequence(this, stub); } @@ -5012,7 +5101,7 @@ void MacroAssembler::EmitFrameSetupForCodeAgePatching(Assembler * assm) { __ stp(fp, lr, MemOperand(jssp, 2 * kXRegSize)); __ add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp); - __ AssertSizeOfCodeGeneratedSince(&start, kCodeAgeSequenceSize); + __ AssertSizeOfCodeGeneratedSince(&start, kNoCodeAgeSequenceLength); } @@ -5035,48 +5124,19 @@ void MacroAssembler::EmitCodeAgeSequence(Assembler * assm, __ AssertSizeOfCodeGeneratedSince(&start, kCodeAgeStubEntryOffset); if (stub) { __ dc64(reinterpret_cast<uint64_t>(stub->instruction_start())); - __ AssertSizeOfCodeGeneratedSince(&start, kCodeAgeSequenceSize); + __ AssertSizeOfCodeGeneratedSince(&start, kNoCodeAgeSequenceLength); } } -bool MacroAssembler::IsYoungSequence(byte* sequence) { - // Generate a young sequence to compare with. - const int length = kCodeAgeSequenceSize / kInstructionSize; - static bool initialized = false; - static byte young[kCodeAgeSequenceSize]; - if (!initialized) { - PatchingAssembler patcher(young, length); - // The young sequence is the frame setup code for FUNCTION code types. It is - // generated by FullCodeGenerator::Generate. 
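  // (Annotation, not part of this patch: the hand-rolled young/old sequence
  //  comparison deleted here moves into the Isolate's shared CodeAgingHelper,
  //  and the local kCodeAgeSequenceSize constant is superseded by
  //  kNoCodeAgeSequenceLength in the hunks above.)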
- MacroAssembler::EmitFrameSetupForCodeAgePatching(&patcher); - initialized = true; - } - - bool is_young = (memcmp(sequence, young, kCodeAgeSequenceSize) == 0); - ASSERT(is_young || IsCodeAgeSequence(sequence)); +bool MacroAssembler::IsYoungSequence(Isolate* isolate, byte* sequence) { + bool is_young = isolate->code_aging_helper()->IsYoung(sequence); + ASSERT(is_young || + isolate->code_aging_helper()->IsOld(sequence)); return is_young; } -#ifdef DEBUG -bool MacroAssembler::IsCodeAgeSequence(byte* sequence) { - // The old sequence varies depending on the code age. However, the code up - // until kCodeAgeStubEntryOffset does not change, so we can check that part to - // get a reasonable level of verification. - const int length = kCodeAgeStubEntryOffset / kInstructionSize; - static bool initialized = false; - static byte old[kCodeAgeStubEntryOffset]; - if (!initialized) { - PatchingAssembler patcher(old, length); - MacroAssembler::EmitCodeAgeSequence(&patcher, NULL); - initialized = true; - } - return memcmp(sequence, old, kCodeAgeStubEntryOffset) == 0; -} -#endif - - void MacroAssembler::TruncatingDiv(Register result, Register dividend, int32_t divisor) { diff --git a/deps/v8/src/arm64/macro-assembler-arm64.h b/deps/v8/src/arm64/macro-assembler-arm64.h index 1777c38e3..7d267a2cb 100644 --- a/deps/v8/src/arm64/macro-assembler-arm64.h +++ b/deps/v8/src/arm64/macro-assembler-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_MACRO_ASSEMBLER_ARM64_H_ #define V8_ARM64_MACRO_ASSEMBLER_ARM64_H_ @@ -255,8 +232,16 @@ class MacroAssembler : public Assembler { void Load(const Register& rt, const MemOperand& addr, Representation r); void Store(const Register& rt, const MemOperand& addr, Representation r); + enum AdrHint { + // The target must be within the immediate range of adr. + kAdrNear, + // The target may be outside of the immediate range of adr. 
Additional + // instructions may be emitted. + kAdrFar + }; + void Adr(const Register& rd, Label* label, AdrHint = kAdrNear); + // Remaining instructions are simple pass-through calls to the assembler. - inline void Adr(const Register& rd, Label* label); inline void Asr(const Register& rd, const Register& rn, unsigned shift); inline void Asr(const Register& rd, const Register& rn, const Register& rm); @@ -387,6 +372,7 @@ class MacroAssembler : public Assembler { const FPRegister& fm, const FPRegister& fa); inline void Frinta(const FPRegister& fd, const FPRegister& fn); + inline void Frintm(const FPRegister& fd, const FPRegister& fn); inline void Frintn(const FPRegister& fd, const FPRegister& fn); inline void Frintz(const FPRegister& fd, const FPRegister& fn); inline void Fsqrt(const FPRegister& fd, const FPRegister& fn); @@ -502,7 +488,8 @@ class MacroAssembler : public Assembler { // Pseudo-instructions ------------------------------------------------------ // Compute rd = abs(rm). - // This function clobbers the condition flags. + // This function clobbers the condition flags. On output the overflow flag is + // set iff the negation overflowed. // // If rm is the minimum representable value, the result is not representable. // Handlers for each case can be specified using the relevant labels. @@ -801,8 +788,15 @@ class MacroAssembler : public Assembler { // Root register. inline void InitializeRootRegister(); + void AssertFPCRState(Register fpcr = NoReg); + void ConfigureFPCR(); + void CanonicalizeNaN(const FPRegister& dst, const FPRegister& src); + void CanonicalizeNaN(const FPRegister& reg) { + CanonicalizeNaN(reg, reg); + } + // Load an object from the root table. - void LoadRoot(Register destination, + void LoadRoot(CPURegister destination, Heap::RootListIndex index); // Store an object to the root table. void StoreRoot(Register source, @@ -883,6 +877,9 @@ class MacroAssembler : public Assembler { void AssertNotSmi(Register object, BailoutReason reason = kOperandIsASmi); void AssertSmi(Register object, BailoutReason reason = kOperandIsNotASmi); + inline void ObjectTag(Register tagged_obj, Register obj); + inline void ObjectUntag(Register untagged_obj, Register obj); + // Abort execution if argument is not a name, enabled via --debug-code. void AssertName(Register object); @@ -932,34 +929,34 @@ class MacroAssembler : public Assembler { DoubleRegister input, DoubleRegister dbl_scratch); - // Try to convert a double to a signed 32-bit int. + // Try to represent a double as a signed 32-bit int. // This succeeds if the result compares equal to the input, so inputs of -0.0 - // are converted to 0 and handled as a success. - // - // On output the Z flag is set if the conversion was successful. - void TryConvertDoubleToInt32(Register as_int, - FPRegister value, - FPRegister scratch_d, - Label* on_successful_conversion = NULL, - Label* on_failed_conversion = NULL) { + // are represented as 0 and handled as a success. + // + // On output the Z flag is set if the operation was successful. + void TryRepresentDoubleAsInt32(Register as_int, + FPRegister value, + FPRegister scratch_d, + Label* on_successful_conversion = NULL, + Label* on_failed_conversion = NULL) { ASSERT(as_int.Is32Bits()); - TryConvertDoubleToInt(as_int, value, scratch_d, on_successful_conversion, - on_failed_conversion); + TryRepresentDoubleAsInt(as_int, value, scratch_d, on_successful_conversion, + on_failed_conversion); } - // Try to convert a double to a signed 64-bit int. 
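  // (Annotation, not part of this patch: only the naming changes from
  //  "convert" to "represent"; the helper still truncates with Fcvtzs,
  //  converts back with Scvtf and compares against the original input,
  //  setting the Z flag when the double was exactly representable.)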
+ // Try to represent a double as a signed 64-bit int. // This succeeds if the result compares equal to the input, so inputs of -0.0 - // are converted to 0 and handled as a success. - // - // On output the Z flag is set if the conversion was successful. - void TryConvertDoubleToInt64(Register as_int, - FPRegister value, - FPRegister scratch_d, - Label* on_successful_conversion = NULL, - Label* on_failed_conversion = NULL) { + // are represented as 0 and handled as a success. + // + // On output the Z flag is set if the operation was successful. + void TryRepresentDoubleAsInt64(Register as_int, + FPRegister value, + FPRegister scratch_d, + Label* on_successful_conversion = NULL, + Label* on_failed_conversion = NULL) { ASSERT(as_int.Is64Bits()); - TryConvertDoubleToInt(as_int, value, scratch_d, on_successful_conversion, - on_failed_conversion); + TryRepresentDoubleAsInt(as_int, value, scratch_d, on_successful_conversion, + on_failed_conversion); } // ---- Object Utilities ---- @@ -1055,7 +1052,7 @@ class MacroAssembler : public Assembler { void Throw(BailoutReason reason); // Throw a message string as an exception if a condition is not true. - void ThrowIf(Condition cc, BailoutReason reason); + void ThrowIf(Condition cond, BailoutReason reason); // Throw a message string as an exception if the value is a smi. void ThrowIfSmi(const Register& value, BailoutReason reason); @@ -1265,12 +1262,11 @@ class MacroAssembler : public Assembler { MacroAssembler* masm_; }; -#ifdef ENABLE_DEBUGGER_SUPPORT // --------------------------------------------------------------------------- // Debugger Support void DebugBreak(); -#endif + // --------------------------------------------------------------------------- // Exception handling @@ -1354,13 +1350,8 @@ class MacroAssembler : public Assembler { Label* gc_required, Register scratch1, Register scratch2, - Register heap_number_map = NoReg); - void AllocateHeapNumberWithValue(Register result, - DoubleRegister value, - Label* gc_required, - Register scratch1, - Register scratch2, - Register heap_number_map = NoReg); + CPURegister value = NoFPReg, + CPURegister heap_number_map = NoReg); // --------------------------------------------------------------------------- // Support functions. @@ -1549,7 +1540,6 @@ class MacroAssembler : public Assembler { Register elements_reg, Register scratch1, FPRegister fpscratch1, - FPRegister fpscratch2, Label* fail, int elements_offset = 0); @@ -1919,6 +1909,9 @@ class MacroAssembler : public Assembler { CPURegList* TmpList() { return &tmp_list_; } CPURegList* FPTmpList() { return &fptmp_list_; } + static CPURegList DefaultTmpList(); + static CPURegList DefaultFPTmpList(); + // Like printf, but print at run-time from generated code. // // The caller must ensure that arguments for floating-point placeholders @@ -1939,10 +1932,6 @@ class MacroAssembler : public Assembler { // PrintfNoPreserve. Callee-saved registers are not used by Printf, and are // implicitly preserved. // - // Unlike many MacroAssembler functions, x8 and x9 are guaranteed to be - // preserved, and can be printed. This allows Printf to be used during debug - // code. - // // This function assumes (and asserts) that the current stack pointer is // callee-saved, not caller-saved. This is most likely the case anyway, as a // caller-saved stack pointer doesn't make a lot of sense. @@ -1998,13 +1987,7 @@ class MacroAssembler : public Assembler { // Return true if the sequence is a young sequence geneated by // EmitFrameSetupForCodeAgePatching. 
Otherwise, this method asserts that the // sequence is a code age sequence (emitted by EmitCodeAgeSequence). - static bool IsYoungSequence(byte* sequence); - -#ifdef DEBUG - // Return true if the sequence is a code age sequence generated by - // EmitCodeAgeSequence. - static bool IsCodeAgeSequence(byte* sequence); -#endif + static bool IsYoungSequence(Isolate* isolate, byte* sequence); // Jumps to found label if a prototype map has dictionary elements. void JumpIfDictionaryInPrototypeChain(Register object, Register scratch0, @@ -2067,7 +2050,7 @@ class MacroAssembler : public Assembler { Condition cond, // eq for new space, ne otherwise. Label* branch); - // Try to convert a double to an int so that integer fast-paths may be + // Try to represent a double as an int so that integer fast-paths may be // used. Not every valid integer value is guaranteed to be caught. // It supports both 32-bit and 64-bit integers depending whether 'as_int' // is a W or X register. @@ -2075,12 +2058,12 @@ class MacroAssembler : public Assembler { // This does not distinguish between +0 and -0, so if this distinction is // important it must be checked separately. // - // On output the Z flag is set if the conversion was successful. - void TryConvertDoubleToInt(Register as_int, - FPRegister value, - FPRegister scratch_d, - Label* on_successful_conversion = NULL, - Label* on_failed_conversion = NULL); + // On output the Z flag is set if the operation was successful. + void TryRepresentDoubleAsInt(Register as_int, + FPRegister value, + FPRegister scratch_d, + Label* on_successful_conversion = NULL, + Label* on_failed_conversion = NULL); bool generating_stub_; #if DEBUG diff --git a/deps/v8/src/arm64/regexp-macro-assembler-arm64.cc b/deps/v8/src/arm64/regexp-macro-assembler-arm64.cc index 536580ab5..97040cf75 100644 --- a/deps/v8/src/arm64/regexp-macro-assembler-arm64.cc +++ b/deps/v8/src/arm64/regexp-macro-assembler-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -1128,7 +1105,7 @@ void RegExpMacroAssemblerARM64::PushBacktrack(Label* label) { int target = label->pos(); __ Mov(w10, target + Code::kHeaderSize - kHeapObjectTag); } else { - __ Adr(x10, label); + __ Adr(x10, label, MacroAssembler::kAdrFar); __ Sub(x10, x10, code_pointer()); if (masm_->emit_debug_code()) { __ Cmp(x10, kWRegMask); @@ -1338,7 +1315,7 @@ int RegExpMacroAssemblerARM64::CheckStackGuardState(Address* return_address, ASSERT(*return_address <= re_code->instruction_start() + re_code->instruction_size()); - MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate); + Object* result = Execution::HandleStackGuardInterrupt(isolate); if (*code_handle != re_code) { // Return address no longer valid int delta = code_handle->address() - re_code->address(); @@ -1454,7 +1431,7 @@ void RegExpMacroAssemblerARM64::CallCheckStackGuardState(Register scratch) { ExternalReference check_stack_guard_state = ExternalReference::re_check_stack_guard_state(isolate()); __ Mov(scratch, check_stack_guard_state); - DirectCEntryStub stub; + DirectCEntryStub stub(isolate()); stub.GenerateCall(masm_, scratch); // The input string may have been moved in memory, we need to reload it. diff --git a/deps/v8/src/arm64/regexp-macro-assembler-arm64.h b/deps/v8/src/arm64/regexp-macro-assembler-arm64.h index 534fd5b01..5d0d925ec 100644 --- a/deps/v8/src/arm64/regexp-macro-assembler-arm64.h +++ b/deps/v8/src/arm64/regexp-macro-assembler-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
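As an aside on the MacroAssembler::kAdrFar hint that PushBacktrack passes above: the sketch below is standalone illustration rather than V8 code, showing the offset split the far path performs for a bound label. The FarAdr and SplitFarOffset names and the 21-bit signed byte range (roughly +/-1 MB) are assumptions of the sketch, not taken verbatim from the patch.

#include <cassert>
#include <cstdint>

// Split a PC-relative byte offset into an adr immediate plus an add immediate;
// the two parts always sum back to the original offset.
struct FarAdr {
  int64_t adr_offset;  // immediate for the adr instruction
  int64_t add_delta;   // immediate for the follow-up add (0 when unused)
};

FarAdr SplitFarOffset(int64_t label_offset) {
  const int64_t kMinAdrOffset = -(int64_t{1} << 20);   // assumed 21-bit range
  const int64_t kMaxAdrOffset = (int64_t{1} << 20) - 1;
  if (label_offset >= kMinAdrOffset && label_offset <= kMaxAdrOffset) {
    return {label_offset, 0};  // a single adr reaches the label
  }
  assert(label_offset <= 0);   // the far, bound-label case is backwards-only
  return {kMinAdrOffset, label_offset - kMinAdrOffset};  // adr, then add the rest
}

For unbound labels the patch instead reserves a patchable adr/nop/movz/add sequence that the PatchingAssembler later rewrites along the same lines.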
#ifndef V8_ARM64_REGEXP_MACRO_ASSEMBLER_ARM64_H_ #define V8_ARM64_REGEXP_MACRO_ASSEMBLER_ARM64_H_ diff --git a/deps/v8/src/arm64/simulator-arm64.cc b/deps/v8/src/arm64/simulator-arm64.cc index cd475b40e..3c970f854 100644 --- a/deps/v8/src/arm64/simulator-arm64.cc +++ b/deps/v8/src/arm64/simulator-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdlib.h> #include <cmath> @@ -80,11 +57,11 @@ TEXT_COLOUR clr_printf = FLAG_log_colour ? COLOUR(GREEN) : ""; // This is basically the same as PrintF, with a guard for FLAG_trace_sim. -void PRINTF_CHECKING TraceSim(const char* format, ...) { +void Simulator::TraceSim(const char* format, ...) { if (FLAG_trace_sim) { va_list arguments; va_start(arguments, format); - OS::VPrint(format, arguments); + OS::VFPrint(stream_, format, arguments); va_end(arguments); } } @@ -392,7 +369,7 @@ Simulator::Simulator() last_debugger_input_(NULL), log_parameters_(NO_PARAM), isolate_(NULL) { - Init(NULL); + Init(stdout); CHECK(!FLAG_trace_sim && !FLAG_log_instruction_stats); } @@ -593,7 +570,7 @@ void Simulator::DoRuntimeCall(Instruction* instr) { break; case ExternalReference::BUILTIN_CALL: { - // MaybeObject* f(v8::internal::Arguments). + // Object* f(v8::internal::Arguments). TraceSim("Type: BUILTIN_CALL\n"); SimulatorRuntimeCall target = reinterpret_cast<SimulatorRuntimeCall>(external); @@ -1001,7 +978,8 @@ void Simulator::FPCompare(double val0, double val1) { void Simulator::SetBreakpoint(Instruction* location) { for (unsigned i = 0; i < breakpoints_.size(); i++) { if (breakpoints_.at(i).location == location) { - PrintF("Existing breakpoint at %p was %s\n", + PrintF(stream_, + "Existing breakpoint at %p was %s\n", reinterpret_cast<void*>(location), breakpoints_.at(i).enabled ? 
"disabled" : "enabled"); breakpoints_.at(i).enabled = !breakpoints_.at(i).enabled; @@ -1010,14 +988,15 @@ void Simulator::SetBreakpoint(Instruction* location) { } Breakpoint new_breakpoint = {location, true}; breakpoints_.push_back(new_breakpoint); - PrintF("Set a breakpoint at %p\n", reinterpret_cast<void*>(location)); + PrintF(stream_, + "Set a breakpoint at %p\n", reinterpret_cast<void*>(location)); } void Simulator::ListBreakpoints() { - PrintF("Breakpoints:\n"); + PrintF(stream_, "Breakpoints:\n"); for (unsigned i = 0; i < breakpoints_.size(); i++) { - PrintF("%p : %s\n", + PrintF(stream_, "%p : %s\n", reinterpret_cast<void*>(breakpoints_.at(i).location), breakpoints_.at(i).enabled ? "enabled" : "disabled"); } @@ -1035,7 +1014,7 @@ void Simulator::CheckBreakpoints() { } } if (hit_a_breakpoint) { - PrintF("Hit and disabled a breakpoint at %p.\n", + PrintF(stream_, "Hit and disabled a breakpoint at %p.\n", reinterpret_cast<void*>(pc_)); Debug(); } @@ -2000,7 +1979,8 @@ void Simulator::VisitDataProcessing2Source(Instruction* instr) { if (shift_op != NO_SHIFT) { // Shift distance encoded in the least-significant five/six bits of the // register. - int mask = (instr->SixtyFourBits() == 1) ? 0x3f : 0x1f; + int mask = (instr->SixtyFourBits() == 1) ? kShiftAmountXRegMask + : kShiftAmountWRegMask; unsigned shift = wreg(instr->Rm()) & mask; result = ShiftOperand(reg_size, reg(reg_size, instr->Rn()), shift_op, shift); @@ -2108,8 +2088,9 @@ void Simulator::VisitBitfield(Instruction* instr) { // Rotate source bitfield into place. int64_t result = (static_cast<uint64_t>(src) >> R) | (src << (reg_size - R)); // Determine the sign extension. - int64_t topbits = ((1L << (reg_size - diff - 1)) - 1) << (diff + 1); - int64_t signbits = extend && ((src >> S) & 1) ? topbits : 0; + int64_t topbits_preshift = (1L << (reg_size - diff - 1)) - 1; + int64_t signbits = (extend && ((src >> S) & 1) ? topbits_preshift : 0) + << (diff + 1); // Merge sign extension, dest/zero and bitfield. result = signbits | (result & mask) | (dst & ~mask); @@ -2389,6 +2370,10 @@ void Simulator::VisitFPDataProcessing1Source(Instruction* instr) { case FSQRT_d: set_dreg(fd, FPSqrt(dreg(fn))); break; case FRINTA_s: set_sreg(fd, FPRoundInt(sreg(fn), FPTieAway)); break; case FRINTA_d: set_dreg(fd, FPRoundInt(dreg(fn), FPTieAway)); break; + case FRINTM_s: + set_sreg(fd, FPRoundInt(sreg(fn), FPNegativeInfinity)); break; + case FRINTM_d: + set_dreg(fd, FPRoundInt(dreg(fn), FPNegativeInfinity)); break; case FRINTN_s: set_sreg(fd, FPRoundInt(sreg(fn), FPTieEven)); break; case FRINTN_d: set_dreg(fd, FPRoundInt(dreg(fn), FPTieEven)); break; case FRINTZ_s: set_sreg(fd, FPRoundInt(sreg(fn), FPZero)); break; @@ -2655,17 +2640,27 @@ double Simulator::FPRoundInt(double value, FPRounding round_mode) { double error = value - int_result; switch (round_mode) { case FPTieAway: { - // If the error is greater than 0.5, or is equal to 0.5 and the integer - // result is positive, round up. - if ((error > 0.5) || ((error == 0.5) && (int_result >= 0.0))) { + // Take care of correctly handling the range ]-0.5, -0.0], which must + // yield -0.0. + if ((-0.5 < value) && (value < 0.0)) { + int_result = -0.0; + + } else if ((error > 0.5) || ((error == 0.5) && (int_result >= 0.0))) { + // If the error is greater than 0.5, or is equal to 0.5 and the integer + // result is positive, round up. int_result++; } break; } case FPTieEven: { + // Take care of correctly handling the range [-0.5, -0.0], which must + // yield -0.0. 
+ if ((-0.5 <= value) && (value < 0.0)) { + int_result = -0.0; + // If the error is greater than 0.5, or is equal to 0.5 and the integer // result is odd, round up. - if ((error > 0.5) || + } else if ((error > 0.5) || ((error == 0.5) && (fmod(int_result, 2) != 0))) { int_result++; } @@ -3166,12 +3161,12 @@ bool Simulator::GetValue(const char* desc, int64_t* value) { bool Simulator::PrintValue(const char* desc) { if (strcmp(desc, "csp") == 0) { ASSERT(CodeFromName(desc) == static_cast<int>(kSPRegInternalCode)); - PrintF("%s csp:%s 0x%016" PRIx64 "%s\n", + PrintF(stream_, "%s csp:%s 0x%016" PRIx64 "%s\n", clr_reg_name, clr_reg_value, xreg(31, Reg31IsStackPointer), clr_normal); return true; } else if (strcmp(desc, "wcsp") == 0) { ASSERT(CodeFromName(desc) == static_cast<int>(kSPRegInternalCode)); - PrintF("%s wcsp:%s 0x%08" PRIx32 "%s\n", + PrintF(stream_, "%s wcsp:%s 0x%08" PRIx32 "%s\n", clr_reg_name, clr_reg_value, wreg(31, Reg31IsStackPointer), clr_normal); return true; } @@ -3181,7 +3176,7 @@ bool Simulator::PrintValue(const char* desc) { if (i < 0 || static_cast<unsigned>(i) >= kNumberOfFPRegisters) return false; if (desc[0] == 'v') { - PrintF("%s %s:%s 0x%016" PRIx64 "%s (%s%s:%s %g%s %s:%s %g%s)\n", + PrintF(stream_, "%s %s:%s 0x%016" PRIx64 "%s (%s%s:%s %g%s %s:%s %g%s)\n", clr_fpreg_name, VRegNameForCode(i), clr_fpreg_value, double_to_rawbits(dreg(i)), clr_normal, @@ -3192,25 +3187,25 @@ bool Simulator::PrintValue(const char* desc) { clr_normal); return true; } else if (desc[0] == 'd') { - PrintF("%s %s:%s %g%s\n", + PrintF(stream_, "%s %s:%s %g%s\n", clr_fpreg_name, DRegNameForCode(i), clr_fpreg_value, dreg(i), clr_normal); return true; } else if (desc[0] == 's') { - PrintF("%s %s:%s %g%s\n", + PrintF(stream_, "%s %s:%s %g%s\n", clr_fpreg_name, SRegNameForCode(i), clr_fpreg_value, sreg(i), clr_normal); return true; } else if (desc[0] == 'w') { - PrintF("%s %s:%s 0x%08" PRIx32 "%s\n", + PrintF(stream_, "%s %s:%s 0x%08" PRIx32 "%s\n", clr_reg_name, WRegNameForCode(i), clr_reg_value, wreg(i), clr_normal); return true; } else { // X register names have a wide variety of starting characters, but anything // else will be an X register. - PrintF("%s %s:%s 0x%016" PRIx64 "%s\n", + PrintF(stream_, "%s %s:%s 0x%016" PRIx64 "%s\n", clr_reg_name, XRegNameForCode(i), clr_reg_value, xreg(i), clr_normal); return true; } @@ -3529,14 +3524,16 @@ void Simulator::VisitException(Instruction* instr) { // terms of speed. if (FLAG_trace_sim_messages || FLAG_trace_sim || (parameters & BREAK)) { if (message != NULL) { - PrintF("%sDebugger hit %d: %s%s%s\n", + PrintF(stream_, + "%sDebugger hit %d: %s%s%s\n", clr_debug_number, code, clr_debug_message, message, clr_normal); } else { - PrintF("%sDebugger hit %d.%s\n", + PrintF(stream_, + "%sDebugger hit %d.%s\n", clr_debug_number, code, clr_normal); diff --git a/deps/v8/src/arm64/simulator-arm64.h b/deps/v8/src/arm64/simulator-arm64.h index 6a7353b46..543385b37 100644 --- a/deps/v8/src/arm64/simulator-arm64.h +++ b/deps/v8/src/arm64/simulator-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_SIMULATOR_ARM64_H_ #define V8_ARM64_SIMULATOR_ARM64_H_ @@ -785,6 +762,7 @@ class Simulator : public DecoderVisitor { // Output stream. FILE* stream_; PrintDisassembler* print_disasm_; + void PRINTF_METHOD_CHECKING TraceSim(const char* format, ...); // Instrumentation. Instrument* instrument_; diff --git a/deps/v8/src/arm64/stub-cache-arm64.cc b/deps/v8/src/arm64/stub-cache-arm64.cc index 1b2e95993..760fbb354 100644 --- a/deps/v8/src/arm64/stub-cache-arm64.cc +++ b/deps/v8/src/arm64/stub-cache-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
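The store-transition code further down switches to the AllocateHeapNumber overload that takes the value register, so the map and the double payload are written together before the pointer is tagged. The sketch below is a standalone illustration rather than V8 code; the struct and function names are invented, and the only facts carried over from the patch are that the value word directly follows the map word and that kHeapObjectTag is 1.

#include <cstdint>
#include <cstring>

// Hypothetical stand-in for an untagged, freshly allocated heap number.
struct UntaggedHeapNumber {
  uint64_t map;   // HeapObject::kMapOffset == 0
  double value;   // HeapNumber::kValueOffset == kMapOffset + pointer size
};

// Write map and value in one 16-byte store (the role Stp plays in the patch),
// then tag the pointer by setting its low bit.
uintptr_t InitialiseAndTag(void* raw, uint64_t map_word, double value) {
  UntaggedHeapNumber fields{map_word, value};
  std::memcpy(raw, &fields, sizeof(fields));
  return reinterpret_cast<uintptr_t>(raw) | 1;  // kHeapObjectTag == 1
}

Untagging is the mirror operation of clearing that low bit, which is what the new ObjectUntag helper does with Bic.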
#include "v8.h" @@ -393,14 +370,28 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ JumpIfNotSmi(value_reg, miss_label); } else if (representation.IsHeapObject()) { __ JumpIfSmi(value_reg, miss_label); + HeapType* field_type = descriptors->GetFieldType(descriptor); + HeapType::Iterator<Map> it = field_type->Classes(); + if (!it.Done()) { + __ Ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); + Label do_store; + while (true) { + __ CompareMap(scratch1, it.Current()); + it.Advance(); + if (it.Done()) { + __ B(ne, miss_label); + break; + } + __ B(eq, &do_store); + } + __ Bind(&do_store); + } } else if (representation.IsDouble()) { UseScratchRegisterScope temps(masm); DoubleRegister temp_double = temps.AcquireD(); __ SmiUntagToDouble(temp_double, value_reg, kSpeculativeUntag); - Label do_store, heap_number; - __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2); - + Label do_store; __ JumpIfSmi(value_reg, &do_store); __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, @@ -408,7 +399,7 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ Ldr(temp_double, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); __ Bind(&do_store); - __ Str(temp_double, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); + __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2, temp_double); } // Stub never generated for non-global objects that require access checks. @@ -544,6 +535,22 @@ void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm, __ JumpIfNotSmi(value_reg, miss_label); } else if (representation.IsHeapObject()) { __ JumpIfSmi(value_reg, miss_label); + HeapType* field_type = lookup->GetFieldType(); + HeapType::Iterator<Map> it = field_type->Classes(); + if (!it.Done()) { + __ Ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); + Label do_store; + while (true) { + __ CompareMap(scratch1, it.Current()); + it.Advance(); + if (it.Done()) { + __ B(ne, miss_label); + break; + } + __ B(eq, &do_store); + } + __ Bind(&do_store); + } } else if (representation.IsDouble()) { UseScratchRegisterScope temps(masm); DoubleRegister temp_double = temps.AcquireD(); @@ -755,7 +762,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm, __ Mov(api_function_address, ref); // Jump to stub. 
- CallApiFunctionStub stub(is_store, call_data_undefined, argc); + CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc); __ TailCallStub(&stub); } @@ -790,7 +797,7 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type, Handle<JSObject> current = Handle<JSObject>::null(); if (type->IsConstant()) { - current = Handle<JSObject>::cast(type->AsConstant()); + current = Handle<JSObject>::cast(type->AsConstant()->Value()); } Handle<JSObject> prototype = Handle<JSObject>::null(); Handle<Map> current_map = receiver_map; @@ -814,7 +821,7 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type, name = factory()->InternalizeString(Handle<String>::cast(name)); } ASSERT(current.is_null() || - (current->property_dictionary()->FindEntry(*name) == + (current->property_dictionary()->FindEntry(name) == NameDictionary::kNotFound)); GenerateDictionaryNegativeLookup(masm(), miss, reg, name, @@ -964,15 +971,17 @@ void LoadStubCompiler::GenerateLoadField(Register reg, Representation representation) { __ Mov(receiver(), reg); if (kind() == Code::LOAD_IC) { - LoadFieldStub stub(field.is_inobject(holder), + LoadFieldStub stub(isolate(), + field.is_inobject(holder), field.translate(holder), representation); - GenerateTailCall(masm(), stub.GetCode(isolate())); + GenerateTailCall(masm(), stub.GetCode()); } else { - KeyedLoadFieldStub stub(field.is_inobject(holder), + KeyedLoadFieldStub stub(isolate(), + field.is_inobject(holder), field.translate(holder), representation); - GenerateTailCall(masm(), stub.GetCode(isolate())); + GenerateTailCall(masm(), stub.GetCode()); } } @@ -1034,7 +1043,7 @@ void LoadStubCompiler::GenerateLoadCallback( ExternalReference ref = ExternalReference(&fun, type, isolate()); __ Mov(getter_address_reg, ref); - CallApiGetterStub stub; + CallApiGetterStub stub(isolate()); __ TailCallStub(&stub); } @@ -1127,19 +1136,6 @@ void LoadStubCompiler::GenerateLoadInterceptor( } -void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) { - UseScratchRegisterScope temps(masm()); - // Check that the object is a boolean. - Register true_root = temps.AcquireX(); - Register false_root = temps.AcquireX(); - ASSERT(!AreAliased(object, true_root, false_root)); - __ LoadTrueFalseRoots(true_root, false_root); - __ Cmp(object, true_root); - __ Ccmp(object, false_root, ZFlag, ne); - __ B(ne, miss); -} - - Handle<Code> StoreStubCompiler::CompileStoreCallback( Handle<JSObject> object, Handle<JSObject> holder, diff --git a/deps/v8/src/arm64/utils-arm64.cc b/deps/v8/src/arm64/utils-arm64.cc index e2589f42e..53a2957e9 100644 --- a/deps/v8/src/arm64/utils-arm64.cc +++ b/deps/v8/src/arm64/utils-arm64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #if V8_TARGET_ARCH_ARM64 diff --git a/deps/v8/src/arm64/utils-arm64.h b/deps/v8/src/arm64/utils-arm64.h index a1fa12cfa..c739e50f2 100644 --- a/deps/v8/src/arm64/utils-arm64.h +++ b/deps/v8/src/arm64/utils-arm64.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ARM64_UTILS_ARM64_H_ #define V8_ARM64_UTILS_ARM64_H_ diff --git a/deps/v8/src/array-iterator.js b/deps/v8/src/array-iterator.js index 3af659dbc..10116b1d1 100644 --- a/deps/v8/src/array-iterator.js +++ b/deps/v8/src/array-iterator.js @@ -31,11 +31,6 @@ // in runtime.js: // var $Array = global.Array; -var ARRAY_ITERATOR_KIND_KEYS = 1; -var ARRAY_ITERATOR_KIND_VALUES = 2; -var ARRAY_ITERATOR_KIND_ENTRIES = 3; -// The spec draft also has "sparse" but it is never used. 
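// (Annotation, not part of this patch: the file-local ARRAY_ITERATOR_KIND_*
//  constants removed here give way to the shared ITERATOR_KIND_* macros that
//  the rewritten ArrayIteratorNext, ArrayEntries, ArrayValues and ArrayKeys
//  below now use.)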
- var arrayIteratorObjectSymbol = GLOBAL_PRIVATE("ArrayIterator#object"); var arrayIteratorNextIndexSymbol = GLOBAL_PRIVATE("ArrayIterator#next"); var arrayIterationKindSymbol = GLOBAL_PRIVATE("ArrayIterator#kind"); @@ -79,25 +74,25 @@ function ArrayIteratorNext() { SET_PRIVATE(iterator, arrayIteratorNextIndexSymbol, index + 1); - if (itemKind == ARRAY_ITERATOR_KIND_VALUES) + if (itemKind == ITERATOR_KIND_VALUES) return CreateIteratorResultObject(array[index], false); - if (itemKind == ARRAY_ITERATOR_KIND_ENTRIES) + if (itemKind == ITERATOR_KIND_ENTRIES) return CreateIteratorResultObject([index, array[index]], false); return CreateIteratorResultObject(index, false); } function ArrayEntries() { - return CreateArrayIterator(this, ARRAY_ITERATOR_KIND_ENTRIES); + return CreateArrayIterator(this, ITERATOR_KIND_ENTRIES); } function ArrayValues() { - return CreateArrayIterator(this, ARRAY_ITERATOR_KIND_VALUES); + return CreateArrayIterator(this, ITERATOR_KIND_VALUES); } function ArrayKeys() { - return CreateArrayIterator(this, ARRAY_ITERATOR_KIND_KEYS); + return CreateArrayIterator(this, ITERATOR_KIND_KEYS); } function SetUpArrayIterator() { diff --git a/deps/v8/src/array.js b/deps/v8/src/array.js index e48230e2b..dcaf0f400 100644 --- a/deps/v8/src/array.js +++ b/deps/v8/src/array.js @@ -1,29 +1,8 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
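The array-iterator.js hunk above drops the file-local ARRAY_ITERATOR_KIND_* constants in favour of shared ITERATOR_KIND_KEYS/VALUES/ENTRIES tags, presumably defined centrally so other iterator implementations can reuse the same kind values. As a rough standalone illustration of the keys/values/entries pattern (plain C++, made-up names, not V8 code):

// Minimal sketch: one iteration routine parameterized by a shared "kind" tag,
// mirroring how keys()/values()/entries() differ only in what they yield.
#include <cstdio>
#include <string>
#include <vector>

enum class IteratorKind { kKeys, kValues, kEntries };  // shared across iterators

std::vector<std::string> Iterate(const std::vector<std::string>& array,
                                 IteratorKind kind) {
  std::vector<std::string> out;
  for (size_t i = 0; i < array.size(); ++i) {
    switch (kind) {
      case IteratorKind::kKeys:    out.push_back(std::to_string(i)); break;
      case IteratorKind::kValues:  out.push_back(array[i]); break;
      case IteratorKind::kEntries: out.push_back(std::to_string(i) + "," + array[i]); break;
    }
  }
  return out;
}

int main() {
  std::vector<std::string> a = {"x", "y"};
  for (const auto& s : Iterate(a, IteratorKind::kEntries)) std::printf("%s\n", s.c_str());
}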
+ +"use strict"; // This file relies on the fact that the following declarations have been made // in runtime.js: @@ -378,17 +357,18 @@ function ArrayToLocaleString() { function ArrayJoin(separator) { CHECK_OBJECT_COERCIBLE(this, "Array.prototype.join"); - var length = TO_UINT32(this.length); + var array = TO_OBJECT_INLINE(this); + var length = TO_UINT32(array.length); if (IS_UNDEFINED(separator)) { separator = ','; } else if (!IS_STRING(separator)) { separator = NonStringToString(separator); } - var result = %_FastAsciiArrayJoin(this, separator); + var result = %_FastAsciiArrayJoin(array, separator); if (!IS_UNDEFINED(result)) return result; - return Join(this, length, separator, ConvertToString); + return Join(array, length, separator, ConvertToString); } @@ -413,24 +393,20 @@ function ObservedArrayPop(n) { function ArrayPop() { CHECK_OBJECT_COERCIBLE(this, "Array.prototype.pop"); - var n = TO_UINT32(this.length); + var array = TO_OBJECT_INLINE(this); + var n = TO_UINT32(array.length); if (n == 0) { - this.length = n; + array.length = n; return; } - if ($Object.isSealed(this)) { - throw MakeTypeError("array_functions_change_sealed", - ["Array.prototype.pop"]); - } - - if (%IsObserved(this)) - return ObservedArrayPop.call(this, n); + if (%IsObserved(array)) + return ObservedArrayPop.call(array, n); n--; - var value = this[n]; - Delete(this, ToName(n), true); - this.length = n; + var value = array[n]; + Delete(array, ToName(n), true); + array.length = n; return value; } @@ -444,13 +420,14 @@ function ObservedArrayPush() { for (var i = 0; i < m; i++) { this[i+n] = %_Arguments(i); } - this.length = n + m; + var new_length = n + m; + this.length = new_length; } finally { EndPerformSplice(this); EnqueueSpliceRecord(this, n, [], m); } - return this.length; + return new_length; } // Appends the arguments to the end of the array and returns the new @@ -458,21 +435,22 @@ function ObservedArrayPush() { function ArrayPush() { CHECK_OBJECT_COERCIBLE(this, "Array.prototype.push"); - var n = TO_UINT32(this.length); - var m = %_ArgumentsLength(); - if (m > 0 && $Object.isSealed(this)) { - throw MakeTypeError("array_functions_change_sealed", - ["Array.prototype.push"]); - } - if (%IsObserved(this)) return ObservedArrayPush.apply(this, arguments); + var array = TO_OBJECT_INLINE(this); + var n = TO_UINT32(array.length); + var m = %_ArgumentsLength(); + for (var i = 0; i < m; i++) { - this[i+n] = %_Arguments(i); + // Use SetProperty rather than a direct keyed store to ensure that the store + // site doesn't become poisened with an elements transition KeyedStoreIC. 
+ %SetProperty(array, i+n, %_Arguments(i), 0, kStrictMode); } - this.length = n + m; - return this.length; + + var new_length = n + m; + array.length = new_length; + return new_length; } @@ -541,33 +519,34 @@ function SparseReverse(array, len) { function ArrayReverse() { CHECK_OBJECT_COERCIBLE(this, "Array.prototype.reverse"); - var j = TO_UINT32(this.length) - 1; + var array = TO_OBJECT_INLINE(this); + var j = TO_UINT32(array.length) - 1; - if (UseSparseVariant(this, j, IS_ARRAY(this))) { - SparseReverse(this, j+1); - return this; + if (UseSparseVariant(array, j, IS_ARRAY(array))) { + SparseReverse(array, j+1); + return array; } for (var i = 0; i < j; i++, j--) { - var current_i = this[i]; - if (!IS_UNDEFINED(current_i) || i in this) { - var current_j = this[j]; - if (!IS_UNDEFINED(current_j) || j in this) { - this[i] = current_j; - this[j] = current_i; + var current_i = array[i]; + if (!IS_UNDEFINED(current_i) || i in array) { + var current_j = array[j]; + if (!IS_UNDEFINED(current_j) || j in array) { + array[i] = current_j; + array[j] = current_i; } else { - this[j] = current_i; - delete this[i]; + array[j] = current_i; + delete array[i]; } } else { - var current_j = this[j]; - if (!IS_UNDEFINED(current_j) || j in this) { - this[i] = current_j; - delete this[j]; + var current_j = array[j]; + if (!IS_UNDEFINED(current_j) || j in array) { + array[i] = current_j; + delete array[j]; } } } - return this; + return array; } @@ -589,30 +568,31 @@ function ObservedArrayShift(len) { function ArrayShift() { CHECK_OBJECT_COERCIBLE(this, "Array.prototype.shift"); - var len = TO_UINT32(this.length); + var array = TO_OBJECT_INLINE(this); + var len = TO_UINT32(array.length); if (len === 0) { - this.length = 0; + array.length = 0; return; } - if ($Object.isSealed(this)) { + if (ObjectIsSealed(array)) { throw MakeTypeError("array_functions_change_sealed", ["Array.prototype.shift"]); } - if (%IsObserved(this)) - return ObservedArrayShift.call(this, len); + if (%IsObserved(array)) + return ObservedArrayShift.call(array, len); - var first = this[0]; + var first = array[0]; - if (IS_ARRAY(this)) { - SmartMove(this, 0, 1, len, 0); + if (IS_ARRAY(array)) { + SmartMove(array, 0, 1, len, 0); } else { - SimpleMove(this, 0, 1, len, 0); + SimpleMove(array, 0, 1, len, 0); } - this.length = len - 1; + array.length = len - 1; return first; } @@ -627,61 +607,48 @@ function ObservedArrayUnshift() { for (var i = 0; i < num_arguments; i++) { this[i] = %_Arguments(i); } - this.length = len + num_arguments; + var new_length = len + num_arguments; + this.length = new_length; } finally { EndPerformSplice(this); EnqueueSpliceRecord(this, 0, [], num_arguments); } - return len + num_arguments; + return new_length; } function ArrayUnshift(arg1) { // length == 1 CHECK_OBJECT_COERCIBLE(this, "Array.prototype.unshift"); - var len = TO_UINT32(this.length); - var num_arguments = %_ArgumentsLength(); - var is_sealed = $Object.isSealed(this); - - if (num_arguments > 0 && is_sealed) { - throw MakeTypeError("array_functions_change_sealed", - ["Array.prototype.unshift"]); - } - if (%IsObserved(this)) return ObservedArrayUnshift.apply(this, arguments); - if (IS_ARRAY(this) && !is_sealed) { - SmartMove(this, 0, 0, len, num_arguments); - } else { - if (num_arguments == 0 && $Object.isFrozen(this)) { - // In the zero argument case, values from the prototype come into the - // object. This can't be allowed on frozen arrays. 
- for (var i = 0; i < len; i++) { - if (!this.hasOwnProperty(i) && !IS_UNDEFINED(this[i])) { - throw MakeTypeError("array_functions_on_frozen", - ["Array.prototype.shift"]); - } - } - } + var array = TO_OBJECT_INLINE(this); + var len = TO_UINT32(array.length); + var num_arguments = %_ArgumentsLength(); + var is_sealed = ObjectIsSealed(array); - SimpleMove(this, 0, 0, len, num_arguments); + if (IS_ARRAY(array) && !is_sealed) { + SmartMove(array, 0, 0, len, num_arguments); + } else { + SimpleMove(array, 0, 0, len, num_arguments); } for (var i = 0; i < num_arguments; i++) { - this[i] = %_Arguments(i); + array[i] = %_Arguments(i); } - this.length = len + num_arguments; - - return this.length; + var new_length = len + num_arguments; + array.length = new_length; + return new_length; } function ArraySlice(start, end) { CHECK_OBJECT_COERCIBLE(this, "Array.prototype.slice"); - var len = TO_UINT32(this.length); + var array = TO_OBJECT_INLINE(this); + var len = TO_UINT32(array.length); var start_i = TO_INTEGER(start); var end_i = len; @@ -705,13 +672,13 @@ function ArraySlice(start, end) { if (end_i < start_i) return result; - if (IS_ARRAY(this) && - !%IsObserved(this) && + if (IS_ARRAY(array) && + !%IsObserved(array) && (end_i > 1000) && - (%EstimateNumberOfElements(this) < end_i)) { - SmartSlice(this, start_i, end_i - start_i, len, result); + (%EstimateNumberOfElements(array) < end_i)) { + SmartSlice(array, start_i, end_i - start_i, len, result); } else { - SimpleSlice(this, start_i, end_i - start_i, len, result); + SimpleSlice(array, start_i, end_i - start_i, len, result); } result.length = end_i - start_i; @@ -799,7 +766,8 @@ function ArraySplice(start, delete_count) { return ObservedArraySplice.apply(this, arguments); var num_arguments = %_ArgumentsLength(); - var len = TO_UINT32(this.length); + var array = TO_OBJECT_INLINE(this); + var len = TO_UINT32(array.length); var start_i = ComputeSpliceStartIndex(TO_INTEGER(start), len); var del_count = ComputeSpliceDeleteCount(delete_count, num_arguments, len, start_i); @@ -807,32 +775,32 @@ function ArraySplice(start, delete_count) { deleted_elements.length = del_count; var num_elements_to_add = num_arguments > 2 ? num_arguments - 2 : 0; - if (del_count != num_elements_to_add && $Object.isSealed(this)) { + if (del_count != num_elements_to_add && ObjectIsSealed(array)) { throw MakeTypeError("array_functions_change_sealed", ["Array.prototype.splice"]); - } else if (del_count > 0 && $Object.isFrozen(this)) { + } else if (del_count > 0 && ObjectIsFrozen(array)) { throw MakeTypeError("array_functions_on_frozen", ["Array.prototype.splice"]); } var use_simple_splice = true; - if (IS_ARRAY(this) && + if (IS_ARRAY(array) && num_elements_to_add !== del_count) { // If we are only deleting/moving a few things near the end of the // array then the simple version is going to be faster, because it // doesn't touch most of the array. 
- var estimated_non_hole_elements = %EstimateNumberOfElements(this); + var estimated_non_hole_elements = %EstimateNumberOfElements(array); if (len > 20 && (estimated_non_hole_elements >> 2) < (len - start_i)) { use_simple_splice = false; } } if (use_simple_splice) { - SimpleSlice(this, start_i, del_count, len, deleted_elements); - SimpleMove(this, start_i, del_count, len, num_elements_to_add); + SimpleSlice(array, start_i, del_count, len, deleted_elements); + SimpleMove(array, start_i, del_count, len, num_elements_to_add); } else { - SmartSlice(this, start_i, del_count, len, deleted_elements); - SmartMove(this, start_i, del_count, len, num_elements_to_add); + SmartSlice(array, start_i, del_count, len, deleted_elements); + SmartMove(array, start_i, del_count, len, num_elements_to_add); } // Insert the arguments into the resulting array in @@ -841,9 +809,9 @@ function ArraySplice(start, delete_count) { var arguments_index = 2; var arguments_length = %_ArgumentsLength(); while (arguments_index < arguments_length) { - this[i++] = %_Arguments(arguments_index++); + array[i++] = %_Arguments(arguments_index++); } - this.length = len - del_count + num_elements_to_add; + array.length = len - del_count + num_elements_to_add; // Return the deleted elements. return deleted_elements; @@ -1160,28 +1128,16 @@ function ArrayFilter(f, receiver) { var result = new $Array(); var accumulator = new InternalArray(); var accumulator_length = 0; - if (%DebugCallbackSupportsStepping(f)) { - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(f); - if (%_CallFunction(receiver, element, i, array, f)) { - accumulator[accumulator_length++] = element; - } - } - } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - if (%_CallFunction(receiver, element, i, array, f)) { - accumulator[accumulator_length++] = element; - } + var stepping = %_DebugCallbackSupportsStepping(f); + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + if (stepping) %DebugPrepareStepInIfStepping(f); + if (%_CallFunction(receiver, element, i, array, f)) { + accumulator[accumulator_length++] = element; } } - // End of duplicate. } %MoveArrayContents(accumulator, result); return result; @@ -1205,24 +1161,14 @@ function ArrayForEach(f, receiver) { receiver = ToObject(receiver); } - if (%DebugCallbackSupportsStepping(f)) { - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(f); - %_CallFunction(receiver, element, i, array, f); - } + var stepping = %_DebugCallbackSupportsStepping(f); + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + if (stepping) %DebugPrepareStepInIfStepping(f); + %_CallFunction(receiver, element, i, array, f); } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - %_CallFunction(receiver, element, i, array, f); - } - } - // End of duplicate. 
} } @@ -1246,24 +1192,14 @@ function ArraySome(f, receiver) { receiver = ToObject(receiver); } - if (%DebugCallbackSupportsStepping(f)) { - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(f); - if (%_CallFunction(receiver, element, i, array, f)) return true; - } + var stepping = %_DebugCallbackSupportsStepping(f); + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + if (stepping) %DebugPrepareStepInIfStepping(f); + if (%_CallFunction(receiver, element, i, array, f)) return true; } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - if (%_CallFunction(receiver, element, i, array, f)) return true; - } - } - // End of duplicate. } return false; } @@ -1286,24 +1222,14 @@ function ArrayEvery(f, receiver) { receiver = ToObject(receiver); } - if (%DebugCallbackSupportsStepping(f)) { - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(f); - if (!%_CallFunction(receiver, element, i, array, f)) return false; - } - } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - if (!%_CallFunction(receiver, element, i, array, f)) return false; - } + var stepping = %_DebugCallbackSupportsStepping(f); + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + if (stepping) %DebugPrepareStepInIfStepping(f); + if (!%_CallFunction(receiver, element, i, array, f)) return false; } - // End of duplicate. } return true; } @@ -1327,24 +1253,14 @@ function ArrayMap(f, receiver) { var result = new $Array(); var accumulator = new InternalArray(length); - if (%DebugCallbackSupportsStepping(f)) { - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(f); - accumulator[i] = %_CallFunction(receiver, element, i, array, f); - } - } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - accumulator[i] = %_CallFunction(receiver, element, i, array, f); - } + var stepping = %_DebugCallbackSupportsStepping(f); + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + if (stepping) %DebugPrepareStepInIfStepping(f); + accumulator[i] = %_CallFunction(receiver, element, i, array, f); } - // End of duplicate. } %MoveArrayContents(accumulator, result); return result; @@ -1484,27 +1400,14 @@ function ArrayReduce(callback, current) { } var receiver = %GetDefaultReceiver(callback); - - if (%DebugCallbackSupportsStepping(callback)) { - for (; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(callback); - current = - %_CallFunction(receiver, current, element, i, array, callback); - } + var stepping = %_DebugCallbackSupportsStepping(callback); + for (; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. 
+ if (stepping) %DebugPrepareStepInIfStepping(callback); + current = %_CallFunction(receiver, current, element, i, array, callback); } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (; i < length; i++) { - if (i in array) { - var element = array[i]; - current = - %_CallFunction(receiver, current, element, i, array, callback); - } - } - // End of duplicate. } return current; } @@ -1534,27 +1437,14 @@ function ArrayReduceRight(callback, current) { } var receiver = %GetDefaultReceiver(callback); - - if (%DebugCallbackSupportsStepping(callback)) { - for (; i >= 0; i--) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(callback); - current = - %_CallFunction(receiver, current, element, i, array, callback); - } - } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (; i >= 0; i--) { - if (i in array) { - var element = array[i]; - current = - %_CallFunction(receiver, current, element, i, array, callback); - } + var stepping = %_DebugCallbackSupportsStepping(callback); + for (; i >= 0; i--) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + if (stepping) %DebugPrepareStepInIfStepping(callback); + current = %_CallFunction(receiver, current, element, i, array, callback); } - // End of duplicate. } return current; } diff --git a/deps/v8/src/arraybuffer.js b/deps/v8/src/arraybuffer.js index d5fd9adba..44989f5db 100644 --- a/deps/v8/src/arraybuffer.js +++ b/deps/v8/src/arraybuffer.js @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
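The array.js hunks above (ArrayFilter, ArrayForEach, ArraySome, ArrayEvery, ArrayMap, ArrayReduce, ArrayReduceRight) replace the duplicated with-stepping/without-stepping loops with a single loop: the %_DebugCallbackSupportsStepping check is hoisted into a local and only a cheap per-element branch remains. A minimal standalone C++ sketch of the same refactoring pattern (hypothetical names, not V8's runtime calls):

// Sketch: hoist an invariant "debugger wants stepping" check out of the loop
// and keep one loop body, instead of duplicating the whole loop.
#include <cstdio>
#include <functional>
#include <vector>

static bool CallbackSupportsStepping() { return false; }       // stand-in for %_DebugCallbackSupportsStepping
static void PrepareStepIn() { std::puts("prepare step-in"); }  // stand-in for %DebugPrepareStepInIfStepping

std::vector<int> Filter(const std::vector<int>& array,
                        const std::function<bool(int)>& f) {
  std::vector<int> accumulator;
  const bool stepping = CallbackSupportsStepping();  // evaluated once, not per element
  for (int element : array) {
    if (stepping) PrepareStepIn();                   // cheap branch replaces a duplicated loop
    if (f(element)) accumulator.push_back(element);
  }
  return accumulator;
}

int main() {
  for (int v : Filter({1, 2, 3, 4}, [](int x) { return x % 2 == 0; }))
    std::printf("%d\n", v);
}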
"use strict"; @@ -40,12 +17,12 @@ function ArrayBufferConstructor(length) { // length = 1 } } -function ArrayBufferGetByteLength() { +function ArrayBufferGetByteLen() { if (!IS_ARRAYBUFFER(this)) { throw MakeTypeError('incompatible_method_receiver', ['ArrayBuffer.prototype.byteLength', this]); } - return %ArrayBufferGetByteLength(this); + return %_ArrayBufferGetByteLength(this); } // ES6 Draft 15.13.5.5.3 @@ -60,7 +37,7 @@ function ArrayBufferSlice(start, end) { end = TO_INTEGER(end); } var first; - var byte_length = %ArrayBufferGetByteLength(this); + var byte_length = %_ArrayBufferGetByteLength(this); if (relativeStart < 0) { first = MathMax(byte_length + relativeStart, 0); } else { @@ -99,7 +76,7 @@ function SetUpArrayBuffer() { // Set up the constructor property on the ArrayBuffer prototype object. %SetProperty($ArrayBuffer.prototype, "constructor", $ArrayBuffer, DONT_ENUM); - InstallGetter($ArrayBuffer.prototype, "byteLength", ArrayBufferGetByteLength); + InstallGetter($ArrayBuffer.prototype, "byteLength", ArrayBufferGetByteLen); InstallFunctions($ArrayBuffer, DONT_ENUM, $Array( "isView", ArrayBufferIsView diff --git a/deps/v8/src/assembler.cc b/deps/v8/src/assembler.cc index 772b6d696..38604538b 100644 --- a/deps/v8/src/assembler.cc +++ b/deps/v8/src/assembler.cc @@ -39,6 +39,7 @@ #include "builtins.h" #include "counters.h" #include "cpu.h" +#include "cpu-profiler.h" #include "debug.h" #include "deoptimizer.h" #include "execution.h" @@ -190,7 +191,7 @@ PredictableCodeSizeScope::~PredictableCodeSizeScope() { #ifdef DEBUG CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f) : assembler_(assembler) { - ASSERT(CpuFeatures::IsSafeForSnapshot(f)); + ASSERT(CpuFeatures::IsSafeForSnapshot(assembler_->isolate(), f)); old_enabled_ = assembler_->enabled_cpu_features(); uint64_t mask = static_cast<uint64_t>(1) << f; // TODO(svenpanne) This special case below doesn't belong here! @@ -213,13 +214,14 @@ CpuFeatureScope::~CpuFeatureScope() { // ----------------------------------------------------------------------------- // Implementation of PlatformFeatureScope -PlatformFeatureScope::PlatformFeatureScope(CpuFeature f) - : old_cross_compile_(CpuFeatures::cross_compile_) { +PlatformFeatureScope::PlatformFeatureScope(Isolate* isolate, CpuFeature f) + : isolate_(isolate), old_cross_compile_(CpuFeatures::cross_compile_) { // CpuFeatures is a global singleton, therefore this is only safe in // single threaded code. - ASSERT(Serializer::enabled()); + ASSERT(Serializer::enabled(isolate)); uint64_t mask = static_cast<uint64_t>(1) << f; CpuFeatures::cross_compile_ |= mask; + USE(isolate_); } @@ -722,7 +724,10 @@ RelocIterator::RelocIterator(Code* code, int mode_mask) { last_id_ = 0; last_position_ = 0; byte* sequence = code->FindCodeAgeSequence(); - if (sequence != NULL && !Code::IsYoungSequence(sequence)) { + // We get the isolate from the map, because at serialization time + // the code pointer has been cloned and isn't really in heap space. 
+ Isolate* isolate = code->map()->GetIsolate(); + if (sequence != NULL && !Code::IsYoungSequence(isolate, sequence)) { code_age_sequence_ = sequence; } else { code_age_sequence_ = NULL; @@ -779,9 +784,6 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) { case RelocInfo::CONSTRUCT_CALL: return "code target (js construct call)"; case RelocInfo::DEBUG_BREAK: -#ifndef ENABLE_DEBUGGER_SUPPORT - UNREACHABLE(); -#endif return "debug break"; case RelocInfo::CODE_TARGET: return "code target"; @@ -808,9 +810,6 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) { case RelocInfo::VENEER_POOL: return "veneer pool"; case RelocInfo::DEBUG_BREAK_SLOT: -#ifndef ENABLE_DEBUGGER_SUPPORT - UNREACHABLE(); -#endif return "debug break slot"; case RelocInfo::CODE_AGE_SEQUENCE: return "code_age_sequence"; @@ -860,7 +859,7 @@ void RelocInfo::Print(Isolate* isolate, FILE* out) { #ifdef VERIFY_HEAP -void RelocInfo::Verify() { +void RelocInfo::Verify(Isolate* isolate) { switch (rmode_) { case EMBEDDED_OBJECT: Object::VerifyPointer(target_object()); @@ -869,10 +868,6 @@ void RelocInfo::Verify() { Object::VerifyPointer(target_cell()); break; case DEBUG_BREAK: -#ifndef ENABLE_DEBUGGER_SUPPORT - UNREACHABLE(); - break; -#endif case CONSTRUCT_CALL: case CODE_TARGET_WITH_ID: case CODE_TARGET: { @@ -881,7 +876,7 @@ void RelocInfo::Verify() { CHECK(addr != NULL); // Check that we can find the right code object. Code* code = Code::GetCodeFromTargetAddress(addr); - Object* found = code->GetIsolate()->FindCodeObject(addr); + Object* found = isolate->FindCodeObject(addr); CHECK(found->IsCode()); CHECK(code->address() == HeapObject::cast(found)->address()); break; @@ -903,7 +898,7 @@ void RelocInfo::Verify() { UNREACHABLE(); break; case CODE_AGE_SEQUENCE: - ASSERT(Code::IsYoungSequence(pc_) || code_age_stub()->IsCode()); + ASSERT(Code::IsYoungSequence(isolate, pc_) || code_age_stub()->IsCode()); break; } } @@ -1014,11 +1009,9 @@ ExternalReference::ExternalReference(const IC_Utility& ic_utility, Isolate* isolate) : address_(Redirect(isolate, ic_utility.address())) {} -#ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference::ExternalReference(const Debug_Address& debug_address, Isolate* isolate) : address_(debug_address.address(isolate)) {} -#endif ExternalReference::ExternalReference(StatsCounter* counter) : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {} @@ -1053,18 +1046,6 @@ ExternalReference ExternalReference::flush_icache_function(Isolate* isolate) { } -ExternalReference ExternalReference::perform_gc_function(Isolate* isolate) { - return - ExternalReference(Redirect(isolate, FUNCTION_ADDR(Runtime::PerformGC))); -} - - -ExternalReference ExternalReference::out_of_memory_function(Isolate* isolate) { - return - ExternalReference(Redirect(isolate, FUNCTION_ADDR(Runtime::OutOfMemory))); -} - - ExternalReference ExternalReference::delete_handle_scope_extensions( Isolate* isolate) { return ExternalReference(Redirect( @@ -1340,6 +1321,30 @@ ExternalReference ExternalReference::address_of_uint32_bias() { } +ExternalReference ExternalReference::is_profiling_address(Isolate* isolate) { + return ExternalReference(isolate->cpu_profiler()->is_profiling_address()); +} + + +ExternalReference ExternalReference::invoke_function_callback( + Isolate* isolate) { + Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); + ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL; + ApiFunction thunk_fun(thunk_address); + return ExternalReference(&thunk_fun, thunk_type, isolate); +} + 
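The assembler.cc additions above expose the CPU profiler's is-profiling flag address and callback thunks (invoke_function_callback, plus the accessor-getter variant that follows) as external references, so generated code can read the flag directly and only route API callbacks through a logging wrapper while profiling is active. A rough standalone sketch of that dispatch idea in plain C++ (all names hypothetical, not the V8 API):

// Sketch of the "check a flag address, maybe go through a profiling thunk"
// pattern. V8 does this from generated code against real profiler state.
#include <cstdio>

using Callback = void (*)(int);

struct Profiler {
  bool is_profiling = false;
  const bool* is_profiling_address() const { return &is_profiling; }
};

// Wrapper ("thunk") that records the call before forwarding to the callback.
void ProfilingThunk(Callback target, int arg) {
  std::puts("profiler: entering API callback");
  target(arg);
  std::puts("profiler: leaving API callback");
}

void InvokeCallback(const Profiler& profiler, Callback target, int arg) {
  if (*profiler.is_profiling_address()) {
    ProfilingThunk(target, arg);   // slow path, only while profiling
  } else {
    target(arg);                   // fast path: direct call
  }
}

void MyCallback(int x) { std::printf("callback(%d)\n", x); }

int main() {
  Profiler profiler;
  InvokeCallback(profiler, MyCallback, 1);   // direct
  profiler.is_profiling = true;
  InvokeCallback(profiler, MyCallback, 2);   // through the thunk
}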
+ +ExternalReference ExternalReference::invoke_accessor_getter_callback( + Isolate* isolate) { + Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); + ExternalReference::Type thunk_type = + ExternalReference::PROFILING_GETTER_CALL; + ApiFunction thunk_fun(thunk_address); + return ExternalReference(&thunk_fun, thunk_type, isolate); +} + + #ifndef V8_INTERPRETED_REGEXP ExternalReference ExternalReference::re_check_stack_guard_state( @@ -1538,7 +1543,6 @@ ExternalReference ExternalReference::mod_two_doubles_operation( } -#ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference ExternalReference::debug_break(Isolate* isolate) { return ExternalReference(Redirect(isolate, FUNCTION_ADDR(Debug_Break))); } @@ -1548,7 +1552,6 @@ ExternalReference ExternalReference::debug_step_in_fp_address( Isolate* isolate) { return ExternalReference(isolate->debug()->step_in_fp_addr()); } -#endif void PositionsRecorder::RecordPosition(int pos) { diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h index 0349b0658..c67253c48 100644 --- a/deps/v8/src/assembler.h +++ b/deps/v8/src/assembler.h @@ -107,6 +107,22 @@ class AssemblerBase: public Malloced { }; +// Avoids emitting debug code during the lifetime of this scope object. +class DontEmitDebugCodeScope BASE_EMBEDDED { + public: + explicit DontEmitDebugCodeScope(AssemblerBase* assembler) + : assembler_(assembler), old_value_(assembler->emit_debug_code()) { + assembler_->set_emit_debug_code(false); + } + ~DontEmitDebugCodeScope() { + assembler_->set_emit_debug_code(old_value_); + }; + private: + AssemblerBase* assembler_; + bool old_value_; +}; + + // Avoids using instructions that vary in size in unpredictable ways between the // snapshot and the running VM. class PredictableCodeSizeScope { @@ -142,10 +158,11 @@ class CpuFeatureScope BASE_EMBEDDED { // different CPU. class PlatformFeatureScope BASE_EMBEDDED { public: - explicit PlatformFeatureScope(CpuFeature f); + PlatformFeatureScope(Isolate* isolate, CpuFeature f); ~PlatformFeatureScope(); private: + Isolate* isolate_; uint64_t old_cross_compile_; }; @@ -237,7 +254,7 @@ enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs }; // describe a property of the datum. Such rmodes are useful for GC // and nice disassembly output. -class RelocInfo BASE_EMBEDDED { +class RelocInfo { public: // The constant kNoPosition is used with the collecting of source positions // in the relocation information. Two types of source positions are collected @@ -503,7 +520,7 @@ class RelocInfo BASE_EMBEDDED { void Print(Isolate* isolate, FILE* out); #endif // ENABLE_DISASSEMBLER #ifdef VERIFY_HEAP - void Verify(); + void Verify(Isolate* isolate); #endif static const int kCodeTargetMask = (1 << (LAST_CODE_ENUM + 1)) - 1; @@ -653,9 +670,7 @@ class RelocIterator: public Malloced { //---------------------------------------------------------------------------- class IC_Utility; class SCTableReference; -#ifdef ENABLE_DEBUGGER_SUPPORT class Debug_Address; -#endif // An ExternalReference represents a C++ address used in the generated @@ -668,7 +683,7 @@ class ExternalReference BASE_EMBEDDED { // Used in the simulator to support different native api calls. enum Type { // Builtin call. - // MaybeObject* f(v8::internal::Arguments). + // Object* f(v8::internal::Arguments). BUILTIN_CALL, // default // Builtin that takes float arguments and returns an int. 
@@ -725,9 +740,7 @@ class ExternalReference BASE_EMBEDDED { ExternalReference(const IC_Utility& ic_utility, Isolate* isolate); -#ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference(const Debug_Address& debug_address, Isolate* isolate); -#endif explicit ExternalReference(StatsCounter* counter); @@ -747,8 +760,6 @@ class ExternalReference BASE_EMBEDDED { static ExternalReference store_buffer_overflow_function( Isolate* isolate); static ExternalReference flush_icache_function(Isolate* isolate); - static ExternalReference perform_gc_function(Isolate* isolate); - static ExternalReference out_of_memory_function(Isolate* isolate); static ExternalReference delete_handle_scope_extensions(Isolate* isolate); static ExternalReference get_date_field_function(Isolate* isolate); @@ -850,15 +861,17 @@ class ExternalReference BASE_EMBEDDED { static ExternalReference cpu_features(); + static ExternalReference is_profiling_address(Isolate* isolate); + static ExternalReference invoke_function_callback(Isolate* isolate); + static ExternalReference invoke_accessor_getter_callback(Isolate* isolate); + Address address() const { return reinterpret_cast<Address>(address_); } -#ifdef ENABLE_DEBUGGER_SUPPORT // Function Debug::Break() static ExternalReference debug_break(Isolate* isolate); // Used to check if single stepping is enabled in generated code. static ExternalReference debug_step_in_fp_address(Isolate* isolate); -#endif #ifndef V8_INTERPRETED_REGEXP // C functions called from RegExp generated code. diff --git a/deps/v8/src/assert-scope.h b/deps/v8/src/assert-scope.h index 428e6d007..5086eaa42 100644 --- a/deps/v8/src/assert-scope.h +++ b/deps/v8/src/assert-scope.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
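Both the DontEmitDebugCodeScope added to assembler.h earlier in this diff and the DisallowDeoptimization/AllowDeoptimization typedefs added to assert-scope.h just below follow the same RAII shape: the constructor saves the current flag and overrides it, the destructor restores it, so nesting works and early returns cannot leak the override. A minimal generic stand-in (not the V8 classes):

// Generic save-and-restore scope guard, the pattern behind
// DontEmitDebugCodeScope and the per-isolate assert scopes. Illustrative only.
#include <cassert>

template <typename T>
class ScopedOverride {
 public:
  ScopedOverride(T* slot, T new_value) : slot_(slot), old_value_(*slot) {
    *slot_ = new_value;
  }
  ~ScopedOverride() { *slot_ = old_value_; }  // restored even on early return

  ScopedOverride(const ScopedOverride&) = delete;
  ScopedOverride& operator=(const ScopedOverride&) = delete;

 private:
  T* slot_;
  T old_value_;
};

int main() {
  bool emit_debug_code = true;
  {
    ScopedOverride<bool> no_debug(&emit_debug_code, false);
    assert(!emit_debug_code);   // overridden inside the scope
    {
      ScopedOverride<bool> nested(&emit_debug_code, true);
      assert(emit_debug_code);  // nesting re-overrides, then restores
    }
    assert(!emit_debug_code);
  }
  assert(emit_debug_code);      // original value restored
}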
#ifndef V8_ASSERT_SCOPE_H_ #define V8_ASSERT_SCOPE_H_ @@ -50,7 +27,8 @@ enum PerThreadAssertType { enum PerIsolateAssertType { JAVASCRIPT_EXECUTION_ASSERT, JAVASCRIPT_EXECUTION_THROWS, - ALLOCATION_FAILURE_ASSERT + ALLOCATION_FAILURE_ASSERT, + DEOPTIMIZATION_ASSERT }; @@ -268,6 +246,14 @@ typedef PerIsolateAssertScopeDebugOnly<ALLOCATION_FAILURE_ASSERT, false> typedef PerIsolateAssertScopeDebugOnly<ALLOCATION_FAILURE_ASSERT, true> AllowAllocationFailure; +// Scope to document where we do not expect deoptimization. +typedef PerIsolateAssertScopeDebugOnly<DEOPTIMIZATION_ASSERT, false> + DisallowDeoptimization; + +// Scope to introduce an exception to DisallowDeoptimization. +typedef PerIsolateAssertScopeDebugOnly<DEOPTIMIZATION_ASSERT, true> + AllowDeoptimization; + } } // namespace v8::internal #endif // V8_ASSERT_SCOPE_H_ diff --git a/deps/v8/src/ast.cc b/deps/v8/src/ast.cc index f6cf18915..303c442f8 100644 --- a/deps/v8/src/ast.cc +++ b/deps/v8/src/ast.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "ast.h" @@ -56,29 +33,30 @@ AST_NODE_LIST(DECL_ACCEPT) // Implementation of other node functionality. -bool Expression::IsSmiLiteral() { +bool Expression::IsSmiLiteral() const { return AsLiteral() != NULL && AsLiteral()->value()->IsSmi(); } -bool Expression::IsStringLiteral() { +bool Expression::IsStringLiteral() const { return AsLiteral() != NULL && AsLiteral()->value()->IsString(); } -bool Expression::IsNullLiteral() { +bool Expression::IsNullLiteral() const { return AsLiteral() != NULL && AsLiteral()->value()->IsNull(); } -bool Expression::IsUndefinedLiteral(Isolate* isolate) { - VariableProxy* var_proxy = AsVariableProxy(); +bool Expression::IsUndefinedLiteral(Isolate* isolate) const { + const VariableProxy* var_proxy = AsVariableProxy(); if (var_proxy == NULL) return false; Variable* var = var_proxy->var(); // The global identifier "undefined" is immutable. 
Everything // else could be reassigned. return var != NULL && var->location() == Variable::UNALLOCATED && - var_proxy->name()->Equals(isolate->heap()->undefined_string()); + String::Equals(var_proxy->name(), + isolate->factory()->undefined_string()); } @@ -207,9 +185,10 @@ ObjectLiteralProperty::ObjectLiteralProperty( emit_store_ = true; key_ = key; value_ = value; - Object* k = *key->value(); + Handle<Object> k = key->value(); if (k->IsInternalizedString() && - zone->isolate()->heap()->proto_string()->Equals(String::cast(k))) { + String::Equals(Handle<String>::cast(k), + zone->isolate()->factory()->proto_string())) { kind_ = PROTOTYPE; } else if (value_->AsMaterializedLiteral() != NULL) { kind_ = MATERIALIZED_LITERAL; @@ -378,9 +357,9 @@ void ArrayLiteral::BuildConstantElements(Isolate* isolate) { } else if (boilerplate_value->IsUninitialized()) { is_simple = false; JSObject::SetOwnElement( - array, i, handle(Smi::FromInt(0), isolate), SLOPPY); + array, i, handle(Smi::FromInt(0), isolate), SLOPPY).Assert(); } else { - JSObject::SetOwnElement(array, i, boilerplate_value, SLOPPY); + JSObject::SetOwnElement(array, i, boilerplate_value, SLOPPY).Assert(); } } @@ -463,7 +442,7 @@ void BinaryOperation::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) { } -bool BinaryOperation::ResultOverwriteAllowed() { +bool BinaryOperation::ResultOverwriteAllowed() const { switch (op_) { case Token::COMMA: case Token::OR: @@ -592,14 +571,9 @@ void Expression::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) { } -int Call::ComputeFeedbackSlotCount(Isolate* isolate) { +bool Call::IsUsingCallFeedbackSlot(Isolate* isolate) const { CallType call_type = GetCallType(isolate); - if (call_type == LOOKUP_SLOT_CALL || call_type == OTHER_CALL) { - // Call only uses a slot in some cases. - return 1; - } - - return 0; + return (call_type != POSSIBLY_EVAL_CALL); } @@ -1177,7 +1151,7 @@ Handle<String> Literal::ToString() { } else { str = DoubleToCString(value_->Number(), buffer); } - return isolate_->factory()->NewStringFromAscii(CStrVector(str)); + return isolate_->factory()->NewStringFromAsciiChecked(str); } diff --git a/deps/v8/src/ast.h b/deps/v8/src/ast.h index c6ee71ed8..0115d9882 100644 --- a/deps/v8/src/ast.h +++ b/deps/v8/src/ast.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_AST_H_ #define V8_AST_H_ @@ -182,15 +159,21 @@ class AstProperties V8_FINAL BASE_EMBEDDED { public: class Flags : public EnumSet<AstPropertiesFlag, int> {}; - AstProperties() : node_count_(0) {} +AstProperties() : node_count_(0), feedback_slots_(0) {} Flags* flags() { return &flags_; } int node_count() { return node_count_; } void add_node_count(int count) { node_count_ += count; } + int feedback_slots() const { return feedback_slots_; } + void increase_feedback_slots(int count) { + feedback_slots_ += count; + } + private: Flags flags_; int node_count_; + int feedback_slots_; }; @@ -215,9 +198,14 @@ class AstNode: public ZoneObject { int position() const { return position_; } // Type testing & conversion functions overridden by concrete subclasses. -#define DECLARE_NODE_FUNCTIONS(type) \ - bool Is##type() { return node_type() == AstNode::k##type; } \ - type* As##type() { return Is##type() ? reinterpret_cast<type*>(this) : NULL; } +#define DECLARE_NODE_FUNCTIONS(type) \ + bool Is##type() const { return node_type() == AstNode::k##type; } \ + type* As##type() { \ + return Is##type() ? reinterpret_cast<type*>(this) : NULL; \ + } \ + const type* As##type() const { \ + return Is##type() ? reinterpret_cast<const type*>(this) : NULL; \ + } AST_NODE_LIST(DECLARE_NODE_FUNCTIONS) #undef DECLARE_NODE_FUNCTIONS @@ -276,8 +264,7 @@ class SmallMapList V8_FINAL { int length() const { return list_.length(); } void AddMapIfMissing(Handle<Map> map, Zone* zone) { - map = Map::CurrentMapForDeprecated(map); - if (map.is_null()) return; + if (!Map::CurrentMapForDeprecated(map).ToHandle(&map)) return; for (int i = 0; i < length(); ++i) { if (at(i).is_identical_to(map)) return; } @@ -325,35 +312,35 @@ class Expression : public AstNode { kTest }; - virtual bool IsValidLeftHandSide() { return false; } + virtual bool IsValidReferenceExpression() const { return false; } // Helpers for ToBoolean conversion. - virtual bool ToBooleanIsTrue() { return false; } - virtual bool ToBooleanIsFalse() { return false; } + virtual bool ToBooleanIsTrue() const { return false; } + virtual bool ToBooleanIsFalse() const { return false; } // Symbols that cannot be parsed as array indices are considered property // names. We do not treat symbols that can be array indexes as property // names because [] for string objects is handled only by keyed ICs. - virtual bool IsPropertyName() { return false; } + virtual bool IsPropertyName() const { return false; } // True iff the result can be safely overwritten (to avoid allocation). // False for operations that can return one of their operands. - virtual bool ResultOverwriteAllowed() { return false; } + virtual bool ResultOverwriteAllowed() const { return false; } // True iff the expression is a literal represented as a smi. - bool IsSmiLiteral(); + bool IsSmiLiteral() const; // True iff the expression is a string literal. 
- bool IsStringLiteral(); + bool IsStringLiteral() const; // True iff the expression is the null literal. - bool IsNullLiteral(); + bool IsNullLiteral() const; // True if we can prove that the expression is the undefined literal. - bool IsUndefinedLiteral(Isolate* isolate); + bool IsUndefinedLiteral(Isolate* isolate) const; // Expression type bounds - Bounds bounds() { return bounds_; } + Bounds bounds() const { return bounds_; } void set_bounds(Bounds bounds) { bounds_ = bounds; } // Type feedback information for assignments and properties. @@ -925,8 +912,7 @@ class ForInStatement V8_FINAL : public ForEachStatement, } // Type feedback information. - virtual ComputablePhase GetComputablePhase() { return DURING_PARSE; } - virtual int ComputeFeedbackSlotCount(Isolate* isolate) { return 1; } + virtual int ComputeFeedbackSlotCount() { return 1; } virtual void SetFirstFeedbackSlot(int slot) { for_in_feedback_slot_ = slot; } int ForInFeedbackSlot() { @@ -1346,7 +1332,7 @@ class Literal V8_FINAL : public Expression { public: DECLARE_NODE_TYPE(Literal) - virtual bool IsPropertyName() V8_OVERRIDE { + virtual bool IsPropertyName() const V8_OVERRIDE { if (value_->IsInternalizedString()) { uint32_t ignored; return !String::cast(*value_)->AsArrayIndex(&ignored); @@ -1359,10 +1345,10 @@ class Literal V8_FINAL : public Expression { return Handle<String>::cast(value_); } - virtual bool ToBooleanIsTrue() V8_OVERRIDE { + virtual bool ToBooleanIsTrue() const V8_OVERRIDE { return value_->BooleanValue(); } - virtual bool ToBooleanIsFalse() V8_OVERRIDE { + virtual bool ToBooleanIsFalse() const V8_OVERRIDE { return !value_->BooleanValue(); } @@ -1389,7 +1375,7 @@ class Literal V8_FINAL : public Expression { static bool Match(void* literal1, void* literal2) { Handle<String> s1 = static_cast<Literal*>(literal1)->ToString(); Handle<String> s2 = static_cast<Literal*>(literal2)->ToString(); - return s1->Equals(*s2); + return String::Equals(s1, s2); } TypeFeedbackId LiteralFeedbackId() const { return reuse(id()); } @@ -1637,19 +1623,17 @@ class VariableProxy V8_FINAL : public Expression { public: DECLARE_NODE_TYPE(VariableProxy) - virtual bool IsValidLeftHandSide() V8_OVERRIDE { - return var_ == NULL ? true : var_->IsValidLeftHandSide(); + virtual bool IsValidReferenceExpression() const V8_OVERRIDE { + return var_ == NULL ? true : var_->IsValidReference(); } - bool IsVariable(Handle<String> n) { + bool IsVariable(Handle<String> n) const { return !is_this() && name().is_identical_to(n); } - bool IsArguments() { return var_ != NULL && var_->is_arguments(); } + bool IsArguments() const { return var_ != NULL && var_->is_arguments(); } - bool IsLValue() { - return is_lvalue_; - } + bool IsLValue() const { return is_lvalue_; } Handle<String> name() const { return name_; } Variable* var() const { return var_; } @@ -1687,7 +1671,7 @@ class Property V8_FINAL : public Expression { public: DECLARE_NODE_TYPE(Property) - virtual bool IsValidLeftHandSide() V8_OVERRIDE { return true; } + virtual bool IsValidReferenceExpression() const V8_OVERRIDE { return true; } Expression* obj() const { return obj_; } Expression* key() const { return key_; } @@ -1754,8 +1738,7 @@ class Call V8_FINAL : public Expression, public FeedbackSlotInterface { ZoneList<Expression*>* arguments() const { return arguments_; } // Type feedback information. 
- virtual ComputablePhase GetComputablePhase() { return AFTER_SCOPING; } - virtual int ComputeFeedbackSlotCount(Isolate* isolate); + virtual int ComputeFeedbackSlotCount() { return 1; } virtual void SetFirstFeedbackSlot(int slot) { call_feedback_slot_ = slot; } @@ -1798,6 +1781,7 @@ class Call V8_FINAL : public Expression, public FeedbackSlotInterface { // Helpers to determine how to handle the call. CallType GetCallType(Isolate* isolate) const; + bool IsUsingCallFeedbackSlot(Isolate* isolate) const; #ifdef DEBUG // Used to assert that the FullCodeGenerator records the return site. @@ -1839,8 +1823,7 @@ class CallNew V8_FINAL : public Expression, public FeedbackSlotInterface { ZoneList<Expression*>* arguments() const { return arguments_; } // Type feedback information. - virtual ComputablePhase GetComputablePhase() { return DURING_PARSE; } - virtual int ComputeFeedbackSlotCount(Isolate* isolate) { + virtual int ComputeFeedbackSlotCount() { return FLAG_pretenuring_call_new ? 2 : 1; } virtual void SetFirstFeedbackSlot(int slot) { @@ -1970,7 +1953,7 @@ class BinaryOperation V8_FINAL : public Expression { public: DECLARE_NODE_TYPE(BinaryOperation) - virtual bool ResultOverwriteAllowed(); + virtual bool ResultOverwriteAllowed() const V8_OVERRIDE; Token::Value op() const { return op_; } Expression* left() const { return left_; } @@ -2376,14 +2359,8 @@ class FunctionLiteral V8_FINAL : public Expression { void set_ast_properties(AstProperties* ast_properties) { ast_properties_ = *ast_properties; } - void set_slot_processor(DeferredFeedbackSlotProcessor* slot_processor) { - slot_processor_ = *slot_processor; - } - void ProcessFeedbackSlots(Isolate* isolate) { - slot_processor_.ProcessFeedbackSlots(isolate); - } int slot_count() { - return slot_processor_.slot_count(); + return ast_properties_.feedback_slots(); } bool dont_optimize() { return dont_optimize_reason_ != kNoReason; } BailoutReason dont_optimize_reason() { return dont_optimize_reason_; } @@ -2434,7 +2411,6 @@ class FunctionLiteral V8_FINAL : public Expression { ZoneList<Statement*>* body_; Handle<String> inferred_name_; AstProperties ast_properties_; - DeferredFeedbackSlotProcessor slot_processor_; BailoutReason dont_optimize_reason_; int materialized_literal_count_; @@ -2909,13 +2885,10 @@ private: \ class AstConstructionVisitor BASE_EMBEDDED { public: - explicit AstConstructionVisitor(Zone* zone) - : dont_optimize_reason_(kNoReason), - zone_(zone) { } + AstConstructionVisitor() : dont_optimize_reason_(kNoReason) { } AstProperties* ast_properties() { return &properties_; } BailoutReason dont_optimize_reason() { return dont_optimize_reason_; } - DeferredFeedbackSlotProcessor* slot_processor() { return &slot_processor_; } private: template<class> friend class AstNodeFactory; @@ -2933,20 +2906,20 @@ class AstConstructionVisitor BASE_EMBEDDED { } void add_slot_node(FeedbackSlotInterface* slot_node) { - slot_processor_.add_slot_node(zone_, slot_node); + int count = slot_node->ComputeFeedbackSlotCount(); + if (count > 0) { + slot_node->SetFirstFeedbackSlot(properties_.feedback_slots()); + properties_.increase_feedback_slots(count); + } } AstProperties properties_; - DeferredFeedbackSlotProcessor slot_processor_; BailoutReason dont_optimize_reason_; - Zone* zone_; }; class AstNullVisitor BASE_EMBEDDED { public: - explicit AstNullVisitor(Zone* zone) {} - // Node visitors. 
#define DEF_VISIT(type) \ void Visit##type(type* node) {} @@ -2962,9 +2935,7 @@ class AstNullVisitor BASE_EMBEDDED { template<class Visitor> class AstNodeFactory V8_FINAL BASE_EMBEDDED { public: - explicit AstNodeFactory(Zone* zone) - : zone_(zone), - visitor_(zone) { } + explicit AstNodeFactory(Zone* zone) : zone_(zone) { } Visitor* visitor() { return &visitor_; } diff --git a/deps/v8/src/atomicops.h b/deps/v8/src/atomicops.h index 08be2a7d3..9289d171b 100644 --- a/deps/v8/src/atomicops.h +++ b/deps/v8/src/atomicops.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // The routines exported by this module are subtle. If you use them, even if // you get the code right, it will depend on careful reasoning about atomicity @@ -63,6 +40,7 @@ namespace v8 { namespace internal { +typedef char Atomic8; typedef int32_t Atomic32; #ifdef V8_HOST_ARCH_64_BIT // We need to be able to go between Atomic64 and AtomicWord implicitly. This @@ -120,10 +98,12 @@ Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, Atomic32 new_value); void MemoryBarrier(); +void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value); void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value); void Acquire_Store(volatile Atomic32* ptr, Atomic32 value); void Release_Store(volatile Atomic32* ptr, Atomic32 value); +Atomic8 NoBarrier_Load(volatile const Atomic8* ptr); Atomic32 NoBarrier_Load(volatile const Atomic32* ptr); Atomic32 Acquire_Load(volatile const Atomic32* ptr); Atomic32 Release_Load(volatile const Atomic32* ptr); diff --git a/deps/v8/src/atomicops_internals_arm64_gcc.h b/deps/v8/src/atomicops_internals_arm64_gcc.h index e6cac1993..36e30a90c 100644 --- a/deps/v8/src/atomicops_internals_arm64_gcc.h +++ b/deps/v8/src/atomicops_internals_arm64_gcc.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file is an internal atomic implementation, use atomicops.h instead. @@ -34,12 +11,16 @@ namespace v8 { namespace internal { inline void MemoryBarrier() { - __asm__ __volatile__ ( // NOLINT - "dmb ish \n\t" // Data memory barrier. - ::: "memory" - ); // NOLINT + __asm__ __volatile__ ("dmb ish" ::: "memory"); // NOLINT } +// NoBarrier versions of the operation include "memory" in the clobber list. +// This is not required for direct usage of the NoBarrier versions of the +// operations. However this is required for correctness when they are used as +// part of the Acquire or Release versions, to ensure that nothing from outside +// the call is reordered between the operation and the memory barrier. This does +// not change the code generated, so has no or minimal impact on the +// NoBarrier operations. inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, Atomic32 old_value, @@ -55,13 +36,12 @@ inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, "stxr %w[temp], %w[new_value], %[ptr] \n\t" // Try to store the new value. "cbnz %w[temp], 0b \n\t" // Retry if it did not work. "1: \n\t" - "clrex \n\t" // In case we didn't swap. 
: [prev]"=&r" (prev), [temp]"=&r" (temp), [ptr]"+Q" (*ptr) - : [old_value]"r" (old_value), + : [old_value]"IJr" (old_value), [new_value]"r" (new_value) - : "memory", "cc" + : "cc", "memory" ); // NOLINT return prev; @@ -101,7 +81,7 @@ inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, : [result]"=&r" (result), [temp]"=&r" (temp), [ptr]"+Q" (*ptr) - : [increment]"r" (increment) + : [increment]"IJr" (increment) : "memory" ); // NOLINT @@ -110,8 +90,10 @@ inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, Atomic32 increment) { + Atomic32 result; + MemoryBarrier(); - Atomic32 result = NoBarrier_AtomicIncrement(ptr, increment); + result = NoBarrier_AtomicIncrement(ptr, increment); MemoryBarrier(); return result; @@ -121,27 +103,9 @@ inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, Atomic32 old_value, Atomic32 new_value) { Atomic32 prev; - int32_t temp; - __asm__ __volatile__ ( // NOLINT - "0: \n\t" - "ldxr %w[prev], %[ptr] \n\t" // Load the previous value. - "cmp %w[prev], %w[old_value] \n\t" - "bne 1f \n\t" - "stxr %w[temp], %w[new_value], %[ptr] \n\t" // Try to store the new value. - "cbnz %w[temp], 0b \n\t" // Retry if it did not work. - "dmb ish \n\t" // Data memory barrier. - "1: \n\t" - // If the compare failed the 'dmb' is unnecessary, but we still need a - // 'clrex'. - "clrex \n\t" - : [prev]"=&r" (prev), - [temp]"=&r" (temp), - [ptr]"+Q" (*ptr) - : [old_value]"r" (old_value), - [new_value]"r" (new_value) - : "memory", "cc" - ); // NOLINT + prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value); + MemoryBarrier(); return prev; } @@ -150,31 +114,17 @@ inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, Atomic32 old_value, Atomic32 new_value) { Atomic32 prev; - int32_t temp; MemoryBarrier(); - - __asm__ __volatile__ ( // NOLINT - "0: \n\t" - "ldxr %w[prev], %[ptr] \n\t" // Load the previous value. - "cmp %w[prev], %w[old_value] \n\t" - "bne 1f \n\t" - "stxr %w[temp], %w[new_value], %[ptr] \n\t" // Try to store the new value. - "cbnz %w[temp], 0b \n\t" // Retry if it did not work. - "1: \n\t" - // If the compare failed the we still need a 'clrex'. 
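// Editor's sketch (not part of the diff): the acquire/release compare-and-swap
// that the hunk above now builds from NoBarrier_CompareAndSwap plus
// MemoryBarrier(), expressed with C++11 std::atomic as an analogy for readers
// who know the standard memory model. The full barrier V8 uses is actually
// stronger than these orderings.
#include <atomic>
#include <cstdint>

int32_t AcquireCas(std::atomic<int32_t>* cell,
                   int32_t old_value, int32_t new_value) {
  // Whether the swap succeeds or fails, 'old_value' ends up holding the
  // value that was observed in the cell.
  cell->compare_exchange_strong(old_value, new_value,
                                std::memory_order_acquire,
                                std::memory_order_acquire);
  return old_value;
}

int32_t ReleaseCas(std::atomic<int32_t>* cell,
                   int32_t old_value, int32_t new_value) {
  cell->compare_exchange_strong(old_value, new_value,
                                std::memory_order_release,
                                std::memory_order_relaxed);
  return old_value;
}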
- "clrex \n\t" - : [prev]"=&r" (prev), - [temp]"=&r" (temp), - [ptr]"+Q" (*ptr) - : [old_value]"r" (old_value), - [new_value]"r" (new_value) - : "memory", "cc" - ); // NOLINT + prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value); return prev; } +inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) { + *ptr = value; +} + inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { *ptr = value; } @@ -185,8 +135,16 @@ inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { } inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { - MemoryBarrier(); - *ptr = value; + __asm__ __volatile__ ( // NOLINT + "stlr %w[value], %[ptr] \n\t" + : [ptr]"=Q" (*ptr) + : [value]"r" (value) + : "memory" + ); // NOLINT +} + +inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) { + return *ptr; } inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { @@ -194,8 +152,15 @@ inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { } inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { - Atomic32 value = *ptr; - MemoryBarrier(); + Atomic32 value; + + __asm__ __volatile__ ( // NOLINT + "ldar %w[value], %[ptr] \n\t" + : [value]"=r" (value) + : [ptr]"Q" (*ptr) + : "memory" + ); // NOLINT + return value; } @@ -221,13 +186,12 @@ inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr, "stxr %w[temp], %[new_value], %[ptr] \n\t" "cbnz %w[temp], 0b \n\t" "1: \n\t" - "clrex \n\t" : [prev]"=&r" (prev), [temp]"=&r" (temp), [ptr]"+Q" (*ptr) - : [old_value]"r" (old_value), + : [old_value]"IJr" (old_value), [new_value]"r" (new_value) - : "memory", "cc" + : "cc", "memory" ); // NOLINT return prev; @@ -267,7 +231,7 @@ inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr, : [result]"=&r" (result), [temp]"=&r" (temp), [ptr]"+Q" (*ptr) - : [increment]"r" (increment) + : [increment]"IJr" (increment) : "memory" ); // NOLINT @@ -276,8 +240,10 @@ inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr, inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr, Atomic64 increment) { + Atomic64 result; + MemoryBarrier(); - Atomic64 result = NoBarrier_AtomicIncrement(ptr, increment); + result = NoBarrier_AtomicIncrement(ptr, increment); MemoryBarrier(); return result; @@ -287,25 +253,9 @@ inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr, Atomic64 old_value, Atomic64 new_value) { Atomic64 prev; - int32_t temp; - __asm__ __volatile__ ( // NOLINT - "0: \n\t" - "ldxr %[prev], %[ptr] \n\t" - "cmp %[prev], %[old_value] \n\t" - "bne 1f \n\t" - "stxr %w[temp], %[new_value], %[ptr] \n\t" - "cbnz %w[temp], 0b \n\t" - "dmb ish \n\t" - "1: \n\t" - "clrex \n\t" - : [prev]"=&r" (prev), - [temp]"=&r" (temp), - [ptr]"+Q" (*ptr) - : [old_value]"r" (old_value), - [new_value]"r" (new_value) - : "memory", "cc" - ); // NOLINT + prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value); + MemoryBarrier(); return prev; } @@ -314,26 +264,9 @@ inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr, Atomic64 old_value, Atomic64 new_value) { Atomic64 prev; - int32_t temp; MemoryBarrier(); - - __asm__ __volatile__ ( // NOLINT - "0: \n\t" - "ldxr %[prev], %[ptr] \n\t" - "cmp %[prev], %[old_value] \n\t" - "bne 1f \n\t" - "stxr %w[temp], %[new_value], %[ptr] \n\t" - "cbnz %w[temp], 0b \n\t" - "1: \n\t" - "clrex \n\t" - : [prev]"=&r" (prev), - [temp]"=&r" (temp), - [ptr]"+Q" (*ptr) - : [old_value]"r" (old_value), - [new_value]"r" (new_value) - : "memory", "cc" - ); // NOLINT + prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value); 
return prev; } @@ -348,8 +281,12 @@ inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) { } inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) { - MemoryBarrier(); - *ptr = value; + __asm__ __volatile__ ( // NOLINT + "stlr %x[value], %[ptr] \n\t" + : [ptr]"=Q" (*ptr) + : [value]"r" (value) + : "memory" + ); // NOLINT } inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) { @@ -357,8 +294,15 @@ inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) { } inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) { - Atomic64 value = *ptr; - MemoryBarrier(); + Atomic64 value; + + __asm__ __volatile__ ( // NOLINT + "ldar %x[value], %[ptr] \n\t" + : [value]"=r" (value) + : [ptr]"Q" (*ptr) + : "memory" + ); // NOLINT + return value; } diff --git a/deps/v8/src/atomicops_internals_arm_gcc.h b/deps/v8/src/atomicops_internals_arm_gcc.h index 918920d02..b72ffb6a6 100644 --- a/deps/v8/src/atomicops_internals_arm_gcc.h +++ b/deps/v8/src/atomicops_internals_arm_gcc.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file is an internal atomic implementation, use atomicops.h instead. // @@ -311,6 +288,14 @@ inline Atomic32 Release_Load(volatile const Atomic32* ptr) { return *ptr; } +// Byte accessors. + +inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) { + *ptr = value; +} + +inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) { return *ptr; } + } } // namespace v8::internal #endif // V8_ATOMICOPS_INTERNALS_ARM_GCC_H_ diff --git a/deps/v8/src/atomicops_internals_atomicword_compat.h b/deps/v8/src/atomicops_internals_atomicword_compat.h index 5934f7068..617aa73b5 100644 --- a/deps/v8/src/atomicops_internals_atomicword_compat.h +++ b/deps/v8/src/atomicops_internals_atomicword_compat.h @@ -1,29 +1,6 @@ // Copyright 2014 the V8 project authors. All rights reserved. 
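// Editor's sketch (not part of the diff): a minimal spinlock composed from
// the primitives implemented above -- Acquire_CompareAndSwap to take the lock
// and Release_Store (stlr on ARM64) to drop it. The class is illustrative
// only and assumes the v8::internal atomicops API.
#include "atomicops.h"

class SpinLock {
 public:
  SpinLock() : state_(0) {}

  void Lock() {
    // Acquire semantics on the winning CAS keep the critical section from
    // being reordered before the lock is taken.
    while (v8::internal::Acquire_CompareAndSwap(&state_, 0, 1) != 0) {
      // spin until the lock is observed free and we win the swap
    }
  }

  void Unlock() {
    // Release semantics keep the critical section from leaking past here.
    v8::internal::Release_Store(&state_, 0);
  }

 private:
  volatile v8::internal::Atomic32 state_;
};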
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file is an internal atomic implementation, use atomicops.h instead. diff --git a/deps/v8/src/atomicops_internals_mac.h b/deps/v8/src/atomicops_internals_mac.h index 4bd0c09bd..5e6abe4a4 100644 --- a/deps/v8/src/atomicops_internals_mac.h +++ b/deps/v8/src/atomicops_internals_mac.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file is an internal atomic implementation, use atomicops.h instead. @@ -93,6 +70,10 @@ inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, return Acquire_CompareAndSwap(ptr, old_value, new_value); } +inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) { + *ptr = value; +} + inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { *ptr = value; } @@ -107,6 +88,10 @@ inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { *ptr = value; } +inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) { + return *ptr; +} + inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { return *ptr; } diff --git a/deps/v8/src/atomicops_internals_mips_gcc.h b/deps/v8/src/atomicops_internals_mips_gcc.h index cb8f8b9d9..da9f6e993 100644 --- a/deps/v8/src/atomicops_internals_mips_gcc.h +++ b/deps/v8/src/atomicops_internals_mips_gcc.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file is an internal atomic implementation, use atomicops.h instead. 
@@ -136,6 +113,10 @@ inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, return NoBarrier_CompareAndSwap(ptr, old_value, new_value); } +inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) { + *ptr = value; +} + inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { *ptr = value; } @@ -154,6 +135,10 @@ inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { *ptr = value; } +inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) { + return *ptr; +} + inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { return *ptr; } diff --git a/deps/v8/src/atomicops_internals_tsan.h b/deps/v8/src/atomicops_internals_tsan.h index 1819798a5..e10081249 100644 --- a/deps/v8/src/atomicops_internals_tsan.h +++ b/deps/v8/src/atomicops_internals_tsan.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
// This file is an internal atomic implementation for compiler-based @@ -276,6 +253,10 @@ inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, return cmp; } +inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) { + __tsan_atomic8_store(ptr, value, __tsan_memory_order_relaxed); +} + inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed); } @@ -289,6 +270,10 @@ inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); } +inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) { + return __tsan_atomic8_load(ptr, __tsan_memory_order_relaxed); +} + inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed); } diff --git a/deps/v8/src/atomicops_internals_x86_gcc.cc b/deps/v8/src/atomicops_internals_x86_gcc.cc index 950b423f4..0b0e04c81 100644 --- a/deps/v8/src/atomicops_internals_x86_gcc.cc +++ b/deps/v8/src/atomicops_internals_x86_gcc.cc @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This module gets enough CPU information to optimize the // atomicops module on x86. @@ -77,10 +54,10 @@ namespace { void AtomicOps_Internalx86CPUFeaturesInit() { using v8::internal::AtomicOps_Internalx86CPUFeatures; - uint32_t eax; - uint32_t ebx; - uint32_t ecx; - uint32_t edx; + uint32_t eax = 0; + uint32_t ebx = 0; + uint32_t ecx = 0; + uint32_t edx = 0; // Get vendor string (issue CPUID with eax = 0) cpuid(eax, ebx, ecx, edx, 0); diff --git a/deps/v8/src/atomicops_internals_x86_gcc.h b/deps/v8/src/atomicops_internals_x86_gcc.h index e58d598fb..c8950676b 100644 --- a/deps/v8/src/atomicops_internals_x86_gcc.h +++ b/deps/v8/src/atomicops_internals_x86_gcc.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. 
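// Editor's sketch (not part of the diff): the same "zero the outputs before
// issuing CPUID" defensive pattern used in the x86 hunk above, written with
// the __get_cpuid helper that GCC and Clang ship in <cpuid.h>. x86-only; the
// feature bit shown (leaf 1, EDX bit 26 = SSE2) is just an example.
#include <cpuid.h>
#include <stdio.h>

int main(void) {
  unsigned int eax = 0, ebx = 0, ecx = 0, edx = 0;  // defined even on failure
  if (__get_cpuid(1, &eax, &ebx, &ecx, &edx)) {
    printf("SSE2: %s\n", (edx & (1u << 26)) ? "yes" : "no");
  } else {
    printf("CPUID leaf 1 not supported\n");
  }
  return 0;
}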
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file is an internal atomic implementation, use atomicops.h instead. @@ -107,6 +84,10 @@ inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, return NoBarrier_CompareAndSwap(ptr, old_value, new_value); } +inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) { + *ptr = value; +} + inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { *ptr = value; } @@ -152,6 +133,10 @@ inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { // See comments in Atomic64 version of Release_Store(), below. } +inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) { + return *ptr; +} + inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { return *ptr; } diff --git a/deps/v8/src/atomicops_internals_x86_msvc.h b/deps/v8/src/atomicops_internals_x86_msvc.h index ad9cf9d80..6376666ae 100644 --- a/deps/v8/src/atomicops_internals_x86_msvc.h +++ b/deps/v8/src/atomicops_internals_x86_msvc.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file is an internal atomic implementation, use atomicops.h instead. @@ -100,6 +77,10 @@ inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, return NoBarrier_CompareAndSwap(ptr, old_value, new_value); } +inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) { + *ptr = value; +} + inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { *ptr = value; } @@ -114,6 +95,10 @@ inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { // See comments in Atomic64 version of Release_Store() below. } +inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) { + return *ptr; +} + inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { return *ptr; } diff --git a/deps/v8/src/base/macros.h b/deps/v8/src/base/macros.h new file mode 100644 index 000000000..b99f01b23 --- /dev/null +++ b/deps/v8/src/base/macros.h @@ -0,0 +1,76 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_BASE_MACROS_H_ +#define V8_BASE_MACROS_H_ + +#include "../../include/v8stdint.h" + + +// The expression OFFSET_OF(type, field) computes the byte-offset +// of the specified field relative to the containing type. This +// corresponds to 'offsetof' (in stddef.h), except that it doesn't +// use 0 or NULL, which causes a problem with the compiler warnings +// we have enabled (which is also why 'offsetof' doesn't seem to work). +// Here we simply use the non-zero value 4, which seems to work. +#define OFFSET_OF(type, field) \ + (reinterpret_cast<intptr_t>(&(reinterpret_cast<type*>(4)->field)) - 4) + + +// The expression ARRAY_SIZE(a) is a compile-time constant of type +// size_t which represents the number of elements of the given +// array. You should only use ARRAY_SIZE on statically allocated +// arrays. +#define ARRAY_SIZE(a) \ + ((sizeof(a) / sizeof(*(a))) / \ + static_cast<size_t>(!(sizeof(a) % sizeof(*(a))))) + + +// A macro to disallow the evil copy constructor and operator= functions +// This should be used in the private: declarations for a class +#define DISALLOW_COPY_AND_ASSIGN(TypeName) \ + TypeName(const TypeName&) V8_DELETE; \ + void operator=(const TypeName&) V8_DELETE + + +// A macro to disallow all the implicit constructors, namely the +// default constructor, copy constructor and operator= functions. +// +// This should be used in the private: declarations for a class +// that wants to prevent anyone from instantiating it. This is +// especially useful for classes containing only static methods. 
+#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName) \ + TypeName() V8_DELETE; \ + DISALLOW_COPY_AND_ASSIGN(TypeName) + + +// Newly written code should use V8_INLINE and V8_NOINLINE directly. +#define INLINE(declarator) V8_INLINE declarator +#define NO_INLINE(declarator) V8_NOINLINE declarator + + +// Newly written code should use V8_WARN_UNUSED_RESULT. +#define MUST_USE_RESULT V8_WARN_UNUSED_RESULT + + +// Define DISABLE_ASAN macros. +#if defined(__has_feature) +#if __has_feature(address_sanitizer) +#define DISABLE_ASAN __attribute__((no_sanitize_address)) +#endif +#endif + + +#ifndef DISABLE_ASAN +#define DISABLE_ASAN +#endif + + +#if V8_CC_GNU +#define V8_IMMEDIATE_CRASH() __builtin_trap() +#else +#define V8_IMMEDIATE_CRASH() ((void(*)())0)() +#endif + +#endif // V8_BASE_MACROS_H_ diff --git a/deps/v8/src/bignum-dtoa.cc b/deps/v8/src/bignum-dtoa.cc index 2b91122af..fa80aad4e 100644 --- a/deps/v8/src/bignum-dtoa.cc +++ b/deps/v8/src/bignum-dtoa.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <cmath> diff --git a/deps/v8/src/bignum-dtoa.h b/deps/v8/src/bignum-dtoa.h index 93ec1f770..fc160aecd 100644 --- a/deps/v8/src/bignum-dtoa.h +++ b/deps/v8/src/bignum-dtoa.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
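// Editor's sketch (not part of the diff): typical uses of the macros
// collected in the new src/base/macros.h above. It assumes compilation
// inside the V8 tree so the include path and V8_DELETE /
// V8_WARN_UNUSED_RESULT resolve; the types and values are illustrative.
#include <assert.h>
#include <stddef.h>
#include "base/macros.h"  // path assumes src/ is on the include path

struct Sample {
  int id;
  double weight;
};

class Registry {
 public:
  Registry() {}
  MUST_USE_RESULT bool Register(int id);  // caller must inspect the result

 private:
  DISALLOW_COPY_AND_ASSIGN(Registry);  // copying a registry would be a bug
};

static const int kPrimes[] = {2, 3, 5, 7, 11};

void CheckMacros() {
  // ARRAY_SIZE is a compile-time element count for statically sized arrays.
  assert(ARRAY_SIZE(kPrimes) == 5);
  // OFFSET_OF matches offsetof() but avoids the null-pointer-based expansion.
  assert(OFFSET_OF(Sample, weight) ==
         static_cast<intptr_t>(offsetof(Sample, weight)));
}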
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_BIGNUM_DTOA_H_ #define V8_BIGNUM_DTOA_H_ diff --git a/deps/v8/src/bignum.cc b/deps/v8/src/bignum.cc index af0edde6d..e47f35530 100644 --- a/deps/v8/src/bignum.cc +++ b/deps/v8/src/bignum.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "../include/v8stdint.h" #include "utils.h" diff --git a/deps/v8/src/bignum.h b/deps/v8/src/bignum.h index dcc4fa702..744768f87 100644 --- a/deps/v8/src/bignum.h +++ b/deps/v8/src/bignum.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_BIGNUM_H_ #define V8_BIGNUM_H_ diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc index c4d7adfbb..1e59f725c 100644 --- a/deps/v8/src/bootstrapper.cc +++ b/deps/v8/src/bootstrapper.cc @@ -1,44 +1,12 @@ -// Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "v8.h" +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
-#include "accessors.h" -#include "api.h" #include "bootstrapper.h" -#include "compiler.h" -#include "debug.h" -#include "execution.h" -#include "global-handles.h" + +#include "accessors.h" #include "isolate-inl.h" -#include "macro-assembler.h" #include "natives.h" -#include "objects-visiting.h" -#include "platform.h" #include "snapshot.h" #include "trig-table.h" #include "extensions/externalize-string-extension.h" @@ -51,7 +19,6 @@ namespace v8 { namespace internal { - NativesExternalStringResource::NativesExternalStringResource( Bootstrapper* bootstrapper, const char* source, @@ -86,10 +53,10 @@ Handle<String> Bootstrapper::NativesSourceLookup(int index) { new NativesExternalStringResource(this, source.start(), source.length()); - Handle<String> source_code = - isolate_->factory()->NewExternalStringFromAscii(resource); // We do not expect this to throw an exception. Change this if it does. - CHECK_NOT_EMPTY_HANDLE(isolate_, source_code); + Handle<String> source_code = + isolate_->factory()->NewExternalStringFromAscii( + resource).ToHandleChecked(); heap->natives_source_cache()->set(index, *source_code); } Handle<Object> cached_source(heap->natives_source_cache()->get(index), @@ -240,8 +207,11 @@ class Genesis BASE_EMBEDDED { ElementsKind elements_kind); bool InstallNatives(); - Handle<JSFunction> InstallTypedArray(const char* name, - ElementsKind elementsKind); + void InstallTypedArray( + const char* name, + ElementsKind elements_kind, + Handle<JSFunction>* fun, + Handle<Map>* external_map); bool InstallExperimentalNatives(); void InstallBuiltinFunctionIds(); void InstallJSFunctionResultCaches(); @@ -361,9 +331,8 @@ Handle<Context> Bootstrapper::CreateEnvironment( static void SetObjectPrototype(Handle<JSObject> object, Handle<Object> proto) { // object.__proto__ = proto; - Factory* factory = object->GetIsolate()->factory(); Handle<Map> old_to_map = Handle<Map>(object->map()); - Handle<Map> new_to_map = factory->CopyMap(old_to_map); + Handle<Map> new_to_map = Map::Copy(old_to_map); new_to_map->set_prototype(*proto); object->set_map(*new_to_map); } @@ -389,14 +358,14 @@ static Handle<JSFunction> InstallFunction(Handle<JSObject> target, Factory* factory = isolate->factory(); Handle<String> internalized_name = factory->InternalizeUtf8String(name); Handle<Code> call_code = Handle<Code>(isolate->builtins()->builtin(call)); - Handle<JSFunction> function = prototype.is_null() ? - factory->NewFunctionWithoutPrototype(internalized_name, call_code) : - factory->NewFunctionWithPrototype(internalized_name, - type, - instance_size, - prototype, - call_code, - install_initial_map); + Handle<JSFunction> function = prototype.is_null() + ? 
factory->NewFunction(internalized_name, call_code) + : factory->NewFunctionWithPrototype(internalized_name, + type, + instance_size, + prototype, + call_code, + install_initial_map); PropertyAttributes attributes; if (target->IsJSBuiltinsObject()) { attributes = @@ -404,9 +373,8 @@ static Handle<JSFunction> InstallFunction(Handle<JSObject> target, } else { attributes = DONT_ENUM; } - CHECK_NOT_EMPTY_HANDLE(isolate, - JSObject::SetLocalPropertyIgnoreAttributes( - target, internalized_name, function, attributes)); + JSObject::SetLocalPropertyIgnoreAttributes( + target, internalized_name, function, attributes).Check(); if (set_instance_class_name) { function->shared()->set_instance_class_name(*internalized_name); } @@ -418,44 +386,48 @@ static Handle<JSFunction> InstallFunction(Handle<JSObject> target, void Genesis::SetFunctionInstanceDescriptor( Handle<Map> map, PrototypePropertyMode prototypeMode) { int size = (prototypeMode == DONT_ADD_PROTOTYPE) ? 4 : 5; - Handle<DescriptorArray> descriptors(factory()->NewDescriptorArray(0, size)); - DescriptorArray::WhitenessWitness witness(*descriptors); - - Handle<Foreign> length(factory()->NewForeign(&Accessors::FunctionLength)); - Handle<Foreign> name(factory()->NewForeign(&Accessors::FunctionName)); - Handle<Foreign> args(factory()->NewForeign(&Accessors::FunctionArguments)); - Handle<Foreign> caller(factory()->NewForeign(&Accessors::FunctionCaller)); - Handle<Foreign> prototype; - if (prototypeMode != DONT_ADD_PROTOTYPE) { - prototype = factory()->NewForeign(&Accessors::FunctionPrototype); - } + Map::EnsureDescriptorSlack(map, size); + PropertyAttributes attribs = static_cast<PropertyAttributes>( DONT_ENUM | DONT_DELETE | READ_ONLY); - map->set_instance_descriptors(*descriptors); + Handle<AccessorInfo> length = + Accessors::FunctionLengthInfo(isolate(), attribs); { // Add length. - CallbacksDescriptor d(*factory()->length_string(), *length, attribs); - map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(length->name())), + length, attribs); + map->AppendDescriptor(&d); } + Handle<AccessorInfo> name = + Accessors::FunctionNameInfo(isolate(), attribs); { // Add name. - CallbacksDescriptor d(*factory()->name_string(), *name, attribs); - map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(name->name())), + name, attribs); + map->AppendDescriptor(&d); } + Handle<AccessorInfo> args = + Accessors::FunctionArgumentsInfo(isolate(), attribs); { // Add arguments. - CallbacksDescriptor d(*factory()->arguments_string(), *args, attribs); - map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(args->name())), + args, attribs); + map->AppendDescriptor(&d); } + Handle<AccessorInfo> caller = + Accessors::FunctionCallerInfo(isolate(), attribs); { // Add caller. - CallbacksDescriptor d(*factory()->caller_string(), *caller, attribs); - map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(caller->name())), + caller, attribs); + map->AppendDescriptor(&d); } if (prototypeMode != DONT_ADD_PROTOTYPE) { - // Add prototype. 
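// Editor's sketch (not part of the diff): the reserve-then-append shape that
// Map::EnsureDescriptorSlack(map, n) + map->AppendDescriptor(&d) follows in
// the hunks above, shown with a plain std::vector purely as an analogy.
// Reserving the known final count up front means the subsequent appends do
// not have to grow the backing storage one entry at a time.
#include <string>
#include <vector>

struct Descriptor {
  std::string name;
  int attributes;
};

void InstallFunctionDescriptors(std::vector<Descriptor>* map_descriptors) {
  const size_t kDescriptorCount = 5;  // length, name, arguments, caller, prototype
  map_descriptors->reserve(kDescriptorCount);   // analogue of EnsureDescriptorSlack
  map_descriptors->push_back({"length", 0});    // analogue of AppendDescriptor
  map_descriptors->push_back({"name", 0});
  map_descriptors->push_back({"arguments", 0});
  map_descriptors->push_back({"caller", 0});
  map_descriptors->push_back({"prototype", 0});
}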
if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) { attribs = static_cast<PropertyAttributes>(attribs & ~READ_ONLY); } - CallbacksDescriptor d(*factory()->prototype_string(), *prototype, attribs); - map->AppendDescriptor(&d, witness); + Handle<AccessorInfo> prototype = + Accessors::FunctionPrototypeInfo(isolate(), attribs); + CallbacksDescriptor d(Handle<Name>(Name::cast(prototype->name())), + prototype, attribs); + map->AppendDescriptor(&d); } } @@ -495,8 +467,8 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) { Handle<String> object_name = factory->Object_string(); { // --- O b j e c t --- - Handle<JSFunction> object_fun = - factory->NewFunction(object_name, factory->null_value()); + Handle<JSFunction> object_fun = factory->NewFunctionWithPrototype( + object_name, factory->null_value()); Handle<Map> object_function_map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); object_fun->set_initial_map(*object_function_map); @@ -521,17 +493,11 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) { // 262 15.3.4. Handle<String> empty_string = factory->InternalizeOneByteString(STATIC_ASCII_VECTOR("Empty")); - Handle<JSFunction> empty_function = - factory->NewFunctionWithoutPrototype(empty_string, SLOPPY); + Handle<Code> code(isolate->builtins()->builtin(Builtins::kEmptyFunction)); + Handle<JSFunction> empty_function = factory->NewFunction(empty_string, code); // --- E m p t y --- - Handle<Code> code = - Handle<Code>(isolate->builtins()->builtin( - Builtins::kEmptyFunction)); - empty_function->set_code(*code); - empty_function->shared()->set_code(*code); - Handle<String> source = - factory->NewStringFromOneByte(STATIC_ASCII_VECTOR("() {}")); + Handle<String> source = factory->NewStringFromStaticAscii("() {}"); Handle<Script> script = factory->NewScript(source); script->set_type(Smi::FromInt(Script::TYPE_NATIVE)); empty_function->shared()->set_script(*script); @@ -557,46 +523,47 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) { void Genesis::SetStrictFunctionInstanceDescriptor( Handle<Map> map, PrototypePropertyMode prototypeMode) { int size = (prototypeMode == DONT_ADD_PROTOTYPE) ? 4 : 5; - Handle<DescriptorArray> descriptors(factory()->NewDescriptorArray(0, size)); - DescriptorArray::WhitenessWitness witness(*descriptors); + Map::EnsureDescriptorSlack(map, size); - Handle<Foreign> length(factory()->NewForeign(&Accessors::FunctionLength)); - Handle<Foreign> name(factory()->NewForeign(&Accessors::FunctionName)); Handle<AccessorPair> arguments(factory()->NewAccessorPair()); Handle<AccessorPair> caller(factory()->NewAccessorPair()); - Handle<Foreign> prototype; - if (prototypeMode != DONT_ADD_PROTOTYPE) { - prototype = factory()->NewForeign(&Accessors::FunctionPrototype); - } PropertyAttributes rw_attribs = static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE); PropertyAttributes ro_attribs = static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY); - map->set_instance_descriptors(*descriptors); + Handle<AccessorInfo> length = + Accessors::FunctionLengthInfo(isolate(), ro_attribs); { // Add length. - CallbacksDescriptor d(*factory()->length_string(), *length, ro_attribs); - map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(length->name())), + length, ro_attribs); + map->AppendDescriptor(&d); } + Handle<AccessorInfo> name = + Accessors::FunctionNameInfo(isolate(), ro_attribs); { // Add name. 
- CallbacksDescriptor d(*factory()->name_string(), *name, ro_attribs); - map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(name->name())), + name, ro_attribs); + map->AppendDescriptor(&d); } { // Add arguments. - CallbacksDescriptor d(*factory()->arguments_string(), *arguments, + CallbacksDescriptor d(factory()->arguments_string(), arguments, rw_attribs); - map->AppendDescriptor(&d, witness); + map->AppendDescriptor(&d); } { // Add caller. - CallbacksDescriptor d(*factory()->caller_string(), *caller, rw_attribs); - map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(factory()->caller_string(), caller, rw_attribs); + map->AppendDescriptor(&d); } if (prototypeMode != DONT_ADD_PROTOTYPE) { // Add prototype. PropertyAttributes attribs = prototypeMode == ADD_WRITEABLE_PROTOTYPE ? rw_attribs : ro_attribs; - CallbacksDescriptor d(*factory()->prototype_string(), *prototype, attribs); - map->AppendDescriptor(&d, witness); + Handle<AccessorInfo> prototype = + Accessors::FunctionPrototypeInfo(isolate(), attribs); + CallbacksDescriptor d(Handle<Name>(Name::cast(prototype->name())), + prototype, attribs); + map->AppendDescriptor(&d); } } @@ -606,16 +573,13 @@ Handle<JSFunction> Genesis::GetThrowTypeErrorFunction() { if (throw_type_error_function.is_null()) { Handle<String> name = factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("ThrowTypeError")); - throw_type_error_function = - factory()->NewFunctionWithoutPrototype(name, SLOPPY); Handle<Code> code(isolate()->builtins()->builtin( Builtins::kStrictModePoisonPill)); + throw_type_error_function = factory()->NewFunction(name, code); throw_type_error_function->set_map(native_context()->sloppy_function_map()); - throw_type_error_function->set_code(*code); - throw_type_error_function->shared()->set_code(*code); throw_type_error_function->shared()->DontAdaptArguments(); - JSObject::PreventExtensions(throw_type_error_function); + JSObject::PreventExtensions(throw_type_error_function).Assert(); } return throw_type_error_function; } @@ -757,15 +721,15 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals( Handle<JSObject> prototype = Handle<JSObject>( JSObject::cast(js_global_function->instance_prototype())); - CHECK_NOT_EMPTY_HANDLE(isolate(), - JSObject::SetLocalPropertyIgnoreAttributes( - prototype, factory()->constructor_string(), - isolate()->object_function(), NONE)); + JSObject::SetLocalPropertyIgnoreAttributes( + prototype, factory()->constructor_string(), + isolate()->object_function(), NONE).Check(); } else { Handle<FunctionTemplateInfo> js_global_constructor( FunctionTemplateInfo::cast(js_global_template->constructor())); js_global_function = factory()->CreateApiFunction(js_global_constructor, + factory()->the_hole_value(), factory()->InnerGlobalObject); } @@ -793,6 +757,7 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals( FunctionTemplateInfo::cast(data->constructor())); global_proxy_function = factory()->CreateApiFunction(global_constructor, + factory()->the_hole_value(), factory()->OuterGlobalObject); } @@ -804,15 +769,17 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals( // Set global_proxy.__proto__ to js_global after ConfigureGlobalObjects // Return the global proxy. 
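// Editor's sketch (not part of the diff): a much-simplified analogue of the
// Maybe-style results used above, where call sites that cannot tolerate
// failure say so explicitly via ToHandleChecked() / Check() instead of the
// old CHECK_NOT_EMPTY_HANDLE wrapper. Illustrative only; V8's real
// MaybeHandle is a GC-aware handle type, not this class.
#include <stdio.h>
#include <stdlib.h>

template <typename T>
class MaybeValue {
 public:
  MaybeValue() : has_value_(false), value_() {}
  explicit MaybeValue(const T& value) : has_value_(true), value_(value) {}

  // Crash immediately if the operation failed; mirrors ToHandleChecked().
  T ToChecked() const {
    if (!has_value_) {
      fprintf(stderr, "MaybeValue: empty result\n");
      abort();
    }
    return value_;
  }

  // Mirrors Check(): the caller only cares that the operation succeeded.
  void Check() const { ToChecked(); }

 private:
  bool has_value_;
  T value_;
};

MaybeValue<int> ParsePositive(int raw) {
  return raw > 0 ? MaybeValue<int>(raw) : MaybeValue<int>();
}

int main() {
  int value = ParsePositive(7).ToChecked();  // would abort on failure
  printf("%d\n", value);
  return 0;
}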
+ Handle<JSGlobalProxy> global_proxy; if (global_object.location() != NULL) { ASSERT(global_object->IsJSGlobalProxy()); - return ReinitializeJSGlobalProxy( - global_proxy_function, - Handle<JSGlobalProxy>::cast(global_object)); + global_proxy = Handle<JSGlobalProxy>::cast(global_object); + factory()->ReinitializeJSGlobalProxy(global_proxy, global_proxy_function); } else { - return Handle<JSGlobalProxy>::cast( + global_proxy = Handle<JSGlobalProxy>::cast( factory()->NewJSObject(global_proxy_function, TENURED)); + global_proxy->set_hash(heap()->undefined_value()); } + return global_proxy; } @@ -836,11 +803,11 @@ void Genesis::HookUpInnerGlobal(Handle<GlobalObject> inner_global) { native_context()->set_security_token(*inner_global); static const PropertyAttributes attributes = static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE); - ForceSetProperty(builtins_global, - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("global")), - inner_global, - attributes); + Runtime::ForceSetObjectProperty(builtins_global, + factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("global")), + inner_global, + attributes).Assert(); // Set up the reference from the global object to the builtins object. JSGlobalObject::cast(*inner_global)->set_builtins(*builtins_global); TransferNamedProperties(inner_global_from_snapshot, inner_global); @@ -870,10 +837,9 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, Heap* heap = isolate->heap(); Handle<String> object_name = factory->Object_string(); - CHECK_NOT_EMPTY_HANDLE(isolate, - JSObject::SetLocalPropertyIgnoreAttributes( - inner_global, object_name, - isolate->object_function(), DONT_ENUM)); + JSObject::SetLocalPropertyIgnoreAttributes( + inner_global, object_name, + isolate->object_function(), DONT_ENUM).Check(); Handle<JSObject> global = Handle<JSObject>(native_context()->global_object()); @@ -898,19 +864,18 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, // This assert protects an optimization in // HGraphBuilder::JSArrayBuilder::EmitMapCode() ASSERT(initial_map->elements_kind() == GetInitialFastElementsKind()); + Map::EnsureDescriptorSlack(initial_map, 1); - Handle<DescriptorArray> array_descriptors( - factory->NewDescriptorArray(0, 1)); - DescriptorArray::WhitenessWitness witness(*array_descriptors); - - Handle<Foreign> array_length(factory->NewForeign(&Accessors::ArrayLength)); PropertyAttributes attribs = static_cast<PropertyAttributes>( DONT_ENUM | DONT_DELETE); - initial_map->set_instance_descriptors(*array_descriptors); + Handle<AccessorInfo> array_length = + Accessors::ArrayLengthInfo(isolate, attribs); { // Add length. - CallbacksDescriptor d(*factory->length_string(), *array_length, attribs); - array_function->initial_map()->AppendDescriptor(&d, witness); + CallbacksDescriptor d( + Handle<Name>(Name::cast(array_length->name())), + array_length, attribs); + array_function->initial_map()->AppendDescriptor(&d); } // array_function is used internally. 
JS code creating array object should @@ -922,7 +887,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, // Cache the array maps, needed by ArrayConstructorStub CacheInitialJSArrayMaps(native_context(), initial_map); ArrayConstructorStub array_constructor_stub(isolate); - Handle<Code> code = array_constructor_stub.GetCode(isolate); + Handle<Code> code = array_constructor_stub.GetCode(); array_function->shared()->set_construct_stub(*code); } @@ -953,19 +918,16 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, Handle<Map> string_map = Handle<Map>(native_context()->string_function()->initial_map()); - Handle<DescriptorArray> string_descriptors( - factory->NewDescriptorArray(0, 1)); - DescriptorArray::WhitenessWitness witness(*string_descriptors); + Map::EnsureDescriptorSlack(string_map, 1); - Handle<Foreign> string_length( - factory->NewForeign(&Accessors::StringLength)); PropertyAttributes attribs = static_cast<PropertyAttributes>( DONT_ENUM | DONT_DELETE | READ_ONLY); - string_map->set_instance_descriptors(*string_descriptors); + Handle<AccessorInfo> string_length( + Accessors::StringLengthInfo(isolate, attribs)); { // Add length. - CallbacksDescriptor d(*factory->length_string(), *string_length, attribs); - string_map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(factory->length_string(), string_length, attribs); + string_map->AppendDescriptor(&d); } } @@ -995,51 +957,49 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, PropertyAttributes final = static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY); - Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(0, 5); - DescriptorArray::WhitenessWitness witness(*descriptors); - initial_map->set_instance_descriptors(*descriptors); + Map::EnsureDescriptorSlack(initial_map, 5); { // ECMA-262, section 15.10.7.1. - FieldDescriptor field(heap->source_string(), + FieldDescriptor field(factory->source_string(), JSRegExp::kSourceFieldIndex, final, Representation::Tagged()); - initial_map->AppendDescriptor(&field, witness); + initial_map->AppendDescriptor(&field); } { // ECMA-262, section 15.10.7.2. - FieldDescriptor field(heap->global_string(), + FieldDescriptor field(factory->global_string(), JSRegExp::kGlobalFieldIndex, final, Representation::Tagged()); - initial_map->AppendDescriptor(&field, witness); + initial_map->AppendDescriptor(&field); } { // ECMA-262, section 15.10.7.3. - FieldDescriptor field(heap->ignore_case_string(), + FieldDescriptor field(factory->ignore_case_string(), JSRegExp::kIgnoreCaseFieldIndex, final, Representation::Tagged()); - initial_map->AppendDescriptor(&field, witness); + initial_map->AppendDescriptor(&field); } { // ECMA-262, section 15.10.7.4. - FieldDescriptor field(heap->multiline_string(), + FieldDescriptor field(factory->multiline_string(), JSRegExp::kMultilineFieldIndex, final, Representation::Tagged()); - initial_map->AppendDescriptor(&field, witness); + initial_map->AppendDescriptor(&field); } { // ECMA-262, section 15.10.7.5. 
PropertyAttributes writable = static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE); - FieldDescriptor field(heap->last_index_string(), + FieldDescriptor field(factory->last_index_string(), JSRegExp::kLastIndexFieldIndex, writable, Representation::Tagged()); - initial_map->AppendDescriptor(&field, witness); + initial_map->AppendDescriptor(&field); } initial_map->set_inobject_properties(5); @@ -1050,7 +1010,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, initial_map->set_visitor_id(StaticVisitorBase::GetVisitorId(*initial_map)); // RegExp prototype object is itself a RegExp. - Handle<Map> proto_map = factory->CopyMap(initial_map); + Handle<Map> proto_map = Map::Copy(initial_map); proto_map->set_prototype(native_context()->initial_object_prototype()); Handle<JSObject> proto = factory->NewJSObjectFromMap(proto_map); proto->InObjectPropertyAtPut(JSRegExp::kSourceFieldIndex, @@ -1072,16 +1032,15 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, { // -- J S O N Handle<String> name = factory->InternalizeUtf8String("JSON"); - Handle<JSFunction> cons = factory->NewFunction(name, - factory->the_hole_value()); + Handle<JSFunction> cons = factory->NewFunctionWithPrototype( + name, factory->the_hole_value()); JSFunction::SetInstancePrototype(cons, Handle<Object>(native_context()->initial_object_prototype(), isolate)); cons->SetInstanceClassName(*name); Handle<JSObject> json_object = factory->NewJSObject(cons, TENURED); ASSERT(json_object->IsJSObject()); - CHECK_NOT_EMPTY_HANDLE(isolate, - JSObject::SetLocalPropertyIgnoreAttributes( - global, name, json_object, DONT_ENUM)); + JSObject::SetLocalPropertyIgnoreAttributes( + global, name, json_object, DONT_ENUM).Check(); native_context()->set_json_object(*json_object); } @@ -1098,9 +1057,14 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, { // -- T y p e d A r r a y s #define INSTALL_TYPED_ARRAY(Type, type, TYPE, ctype, size) \ { \ - Handle<JSFunction> fun = InstallTypedArray(#Type "Array", \ - TYPE##_ELEMENTS); \ + Handle<JSFunction> fun; \ + Handle<Map> external_map; \ + InstallTypedArray(#Type "Array", \ + TYPE##_ELEMENTS, \ + &fun, \ + &external_map); \ native_context()->set_##type##_array_fun(*fun); \ + native_context()->set_##type##_array_external_map(*external_map); \ } TYPED_ARRAYS(INSTALL_TYPED_ARRAY) #undef INSTALL_TYPED_ARRAY @@ -1132,11 +1096,9 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, // class_name equals 'Arguments'. Handle<String> arguments_string = factory->InternalizeOneByteString( STATIC_ASCII_VECTOR("Arguments")); - Handle<Code> code = Handle<Code>( - isolate->builtins()->builtin(Builtins::kIllegal)); - Handle<JSObject> prototype = - Handle<JSObject>( - JSObject::cast(native_context()->object_function()->prototype())); + Handle<Code> code(isolate->builtins()->builtin(Builtins::kIllegal)); + Handle<JSObject> prototype( + JSObject::cast(native_context()->object_function()->prototype())); Handle<JSFunction> function = factory->NewFunctionWithPrototype(arguments_string, @@ -1153,24 +1115,22 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, native_context()->set_sloppy_arguments_boilerplate(*result); // Note: length must be added as the first property and // callee must be added as the second property. 
- CHECK_NOT_EMPTY_HANDLE(isolate, - JSObject::SetLocalPropertyIgnoreAttributes( - result, factory->length_string(), - factory->undefined_value(), DONT_ENUM, - Object::FORCE_TAGGED, FORCE_FIELD)); - CHECK_NOT_EMPTY_HANDLE(isolate, - JSObject::SetLocalPropertyIgnoreAttributes( - result, factory->callee_string(), - factory->undefined_value(), DONT_ENUM, - Object::FORCE_TAGGED, FORCE_FIELD)); + JSObject::SetLocalPropertyIgnoreAttributes( + result, factory->length_string(), + factory->undefined_value(), DONT_ENUM, + Object::FORCE_TAGGED, FORCE_FIELD).Check(); + JSObject::SetLocalPropertyIgnoreAttributes( + result, factory->callee_string(), + factory->undefined_value(), DONT_ENUM, + Object::FORCE_TAGGED, FORCE_FIELD).Check(); #ifdef DEBUG LookupResult lookup(isolate); - result->LocalLookup(heap->callee_string(), &lookup); + result->LocalLookup(factory->callee_string(), &lookup); ASSERT(lookup.IsField()); ASSERT(lookup.GetFieldIndex().field_index() == Heap::kArgumentsCalleeIndex); - result->LocalLookup(heap->length_string(), &lookup); + result->LocalLookup(factory->length_string(), &lookup); ASSERT(lookup.IsField()); ASSERT(lookup.GetFieldIndex().field_index() == Heap::kArgumentsLengthIndex); @@ -1195,7 +1155,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, Handle<Map> old_map( native_context()->sloppy_arguments_boilerplate()->map()); - Handle<Map> new_map = factory->CopyMap(old_map); + Handle<Map> new_map = Map::Copy(old_map); new_map->set_pre_allocated_property_fields(2); Handle<JSObject> result = factory->NewJSObjectFromMap(new_map); // Set elements kind after allocating the object because @@ -1227,26 +1187,24 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, Heap::kStrictArgumentsObjectSize); // Create the descriptor array for the arguments object. - Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(0, 3); - DescriptorArray::WhitenessWitness witness(*descriptors); - map->set_instance_descriptors(*descriptors); + Map::EnsureDescriptorSlack(map, 3); { // length FieldDescriptor d( - *factory->length_string(), 0, DONT_ENUM, Representation::Tagged()); - map->AppendDescriptor(&d, witness); + factory->length_string(), 0, DONT_ENUM, Representation::Tagged()); + map->AppendDescriptor(&d); } { // callee - CallbacksDescriptor d(*factory->callee_string(), - *callee, + CallbacksDescriptor d(factory->callee_string(), + callee, attributes); - map->AppendDescriptor(&d, witness); + map->AppendDescriptor(&d); } { // caller - CallbacksDescriptor d(*factory->caller_string(), - *caller, + CallbacksDescriptor d(factory->caller_string(), + caller, attributes); - map->AppendDescriptor(&d, witness); + map->AppendDescriptor(&d); } map->set_function_with_prototype(true); @@ -1263,14 +1221,13 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, native_context()->set_strict_arguments_boilerplate(*result); // Add length property only for strict mode boilerplate. 
- CHECK_NOT_EMPTY_HANDLE(isolate, - JSObject::SetLocalPropertyIgnoreAttributes( - result, factory->length_string(), - factory->undefined_value(), DONT_ENUM)); + JSObject::SetLocalPropertyIgnoreAttributes( + result, factory->length_string(), + factory->undefined_value(), DONT_ENUM).Check(); #ifdef DEBUG LookupResult lookup(isolate); - result->LocalLookup(heap->length_string(), &lookup); + result->LocalLookup(factory->length_string(), &lookup); ASSERT(lookup.IsField()); ASSERT(lookup.GetFieldIndex().field_index() == Heap::kArgumentsLengthIndex); @@ -1330,18 +1287,26 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, } -Handle<JSFunction> Genesis::InstallTypedArray( - const char* name, ElementsKind elementsKind) { +void Genesis::InstallTypedArray( + const char* name, + ElementsKind elements_kind, + Handle<JSFunction>* fun, + Handle<Map>* external_map) { Handle<JSObject> global = Handle<JSObject>(native_context()->global_object()); Handle<JSFunction> result = InstallFunction(global, name, JS_TYPED_ARRAY_TYPE, JSTypedArray::kSize, isolate()->initial_object_prototype(), Builtins::kIllegal, false, true); Handle<Map> initial_map = isolate()->factory()->NewMap( - JS_TYPED_ARRAY_TYPE, JSTypedArray::kSizeWithInternalFields, elementsKind); + JS_TYPED_ARRAY_TYPE, + JSTypedArray::kSizeWithInternalFields, + elements_kind); result->set_initial_map(*initial_map); initial_map->set_constructor(*result); - return result; + *fun = result; + + ElementsKind external_kind = GetNextTransitionElementsKind(elements_kind); + *external_map = Map::AsElementsKind(initial_map, external_kind); } @@ -1371,6 +1336,16 @@ void Genesis::InitializeExperimentalGlobal() { isolate()->initial_object_prototype(), Builtins::kIllegal, true, true); } + { // -- S e t I t e r a t o r + Handle<Map> map = isolate()->factory()->NewMap( + JS_SET_ITERATOR_TYPE, JSSetIterator::kSize); + native_context()->set_set_iterator_map(*map); + } + { // -- M a p I t e r a t o r + Handle<Map> map = isolate()->factory()->NewMap( + JS_MAP_ITERATOR_TYPE, JSMapIterator::kSize); + native_context()->set_map_iterator_map(*map); + } } if (FLAG_harmony_generators) { @@ -1391,61 +1366,59 @@ void Genesis::InitializeExperimentalGlobal() { // Create maps for generator functions and their prototypes. Store those // maps in the native context. 
Handle<Map> function_map(native_context()->sloppy_function_map()); - Handle<Map> generator_function_map = factory()->CopyMap(function_map); + Handle<Map> generator_function_map = Map::Copy(function_map); generator_function_map->set_prototype(*generator_function_prototype); native_context()->set_sloppy_generator_function_map( *generator_function_map); Handle<Map> strict_mode_function_map( native_context()->strict_function_map()); - Handle<Map> strict_mode_generator_function_map = factory()->CopyMap( - strict_mode_function_map); + Handle<Map> strict_mode_generator_function_map = + Map::Copy(strict_mode_function_map); strict_mode_generator_function_map->set_prototype( *generator_function_prototype); native_context()->set_strict_generator_function_map( *strict_mode_generator_function_map); - Handle<Map> object_map(native_context()->object_function()->initial_map()); - Handle<Map> generator_object_prototype_map = factory()->CopyMap( - object_map, 0); + Handle<JSFunction> object_function(native_context()->object_function()); + Handle<Map> generator_object_prototype_map = Map::Create( + object_function, 0); generator_object_prototype_map->set_prototype( *generator_object_prototype); native_context()->set_generator_object_prototype_map( *generator_object_prototype_map); + } - // Create a map for generator result objects. - ASSERT(object_map->inobject_properties() == 0); - STATIC_ASSERT(JSGeneratorObject::kResultPropertyCount == 2); - Handle<Map> generator_result_map = factory()->CopyMap(object_map, - JSGeneratorObject::kResultPropertyCount); - ASSERT(generator_result_map->inobject_properties() == - JSGeneratorObject::kResultPropertyCount); + if (FLAG_harmony_collections || FLAG_harmony_generators) { + // Collection forEach uses an iterator result object. + // Generators return iterator result objects.
- Handle<DescriptorArray> descriptors = factory()->NewDescriptorArray(0, + STATIC_ASSERT(JSGeneratorObject::kResultPropertyCount == 2); + Handle<JSFunction> object_function(native_context()->object_function()); + ASSERT(object_function->initial_map()->inobject_properties() == 0); + Handle<Map> iterator_result_map = Map::Create( + object_function, JSGeneratorObject::kResultPropertyCount); + ASSERT(iterator_result_map->inobject_properties() == JSGeneratorObject::kResultPropertyCount); - DescriptorArray::WhitenessWitness witness(*descriptors); - generator_result_map->set_instance_descriptors(*descriptors); + Map::EnsureDescriptorSlack( + iterator_result_map, JSGeneratorObject::kResultPropertyCount); - Handle<String> value_string = factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("value")); - FieldDescriptor value_descr(*value_string, + FieldDescriptor value_descr(isolate()->factory()->value_string(), JSGeneratorObject::kResultValuePropertyIndex, NONE, Representation::Tagged()); - generator_result_map->AppendDescriptor(&value_descr, witness); + iterator_result_map->AppendDescriptor(&value_descr); - Handle<String> done_string = factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("done")); - FieldDescriptor done_descr(*done_string, + FieldDescriptor done_descr(isolate()->factory()->done_string(), JSGeneratorObject::kResultDonePropertyIndex, NONE, Representation::Tagged()); - generator_result_map->AppendDescriptor(&done_descr, witness); + iterator_result_map->AppendDescriptor(&done_descr); - generator_result_map->set_unused_property_fields(0); + iterator_result_map->set_unused_property_fields(0); ASSERT_EQ(JSGeneratorObject::kResultSize, - generator_result_map->instance_size()); - native_context()->set_generator_result_map(*generator_result_map); + iterator_result_map->instance_size()); + native_context()->set_iterator_result_map(*iterator_result_map); } } @@ -1461,10 +1434,12 @@ bool Genesis::CompileBuiltin(Isolate* isolate, int index) { bool Genesis::CompileExperimentalBuiltin(Isolate* isolate, int index) { Vector<const char> name = ExperimentalNatives::GetScriptName(index); Factory* factory = isolate->factory(); - Handle<String> source_code = + Handle<String> source_code; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, source_code, factory->NewStringFromAscii( - ExperimentalNatives::GetRawScriptSource(index)); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, source_code, false); + ExperimentalNatives::GetRawScriptSource(index)), + false); return CompileNative(isolate, name, source_code); } @@ -1473,9 +1448,7 @@ bool Genesis::CompileNative(Isolate* isolate, Vector<const char> name, Handle<String> source) { HandleScope scope(isolate); -#ifdef ENABLE_DEBUGGER_SUPPORT isolate->debugger()->set_compiling_natives(true); -#endif // During genesis, the boilerplate for stack overflow won't work until the // environment has been at least partially initialized. Add a stack check // before entering JS code to catch overflow early. @@ -1491,9 +1464,7 @@ bool Genesis::CompileNative(Isolate* isolate, true); ASSERT(isolate->has_pending_exception() != result); if (!result) isolate->clear_pending_exception(); -#ifdef ENABLE_DEBUGGER_SUPPORT isolate->debugger()->set_compiling_natives(false); -#endif return result; } @@ -1513,8 +1484,8 @@ bool Genesis::CompileScriptCached(Isolate* isolate, // function and insert it into the cache. 
if (cache == NULL || !cache->Lookup(name, &function_info)) { ASSERT(source->IsOneByteRepresentation()); - Handle<String> script_name = factory->NewStringFromUtf8(name); - ASSERT(!script_name.is_null()); + Handle<String> script_name = + factory->NewStringFromUtf8(name).ToHandleChecked(); function_info = Compiler::CompileScript( source, script_name, @@ -1548,20 +1519,17 @@ bool Genesis::CompileScriptCached(Isolate* isolate, ? top_context->builtins() : top_context->global_object(), isolate); - bool has_pending_exception; - Execution::Call(isolate, fun, receiver, 0, NULL, &has_pending_exception); - if (has_pending_exception) return false; - return true; + return !Execution::Call( + isolate, fun, receiver, 0, NULL).is_null(); } -#define INSTALL_NATIVE(Type, name, var) \ - Handle<String> var##_name = \ - factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR(name)); \ - Object* var##_native = \ - native_context()->builtins()->GetPropertyNoExceptionThrown( \ - *var##_name); \ - native_context()->set_##var(Type::cast(var##_native)); +#define INSTALL_NATIVE(Type, name, var) \ + Handle<String> var##_name = \ + factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR(name)); \ + Handle<Object> var##_native = Object::GetProperty( \ + handle(native_context()->builtins()), var##_name).ToHandleChecked(); \ + native_context()->set_##var(Type::cast(*var##_native)); void Genesis::InstallNativeFunctions() { @@ -1598,13 +1566,18 @@ void Genesis::InstallNativeFunctions() { observers_begin_perform_splice); INSTALL_NATIVE(JSFunction, "EndPerformSplice", observers_end_perform_splice); + INSTALL_NATIVE(JSFunction, "NativeObjectObserve", + native_object_observe); + INSTALL_NATIVE(JSFunction, "NativeObjectGetNotifier", + native_object_get_notifier); + INSTALL_NATIVE(JSFunction, "NativeObjectNotifierPerformChange", + native_object_notifier_perform_change); } void Genesis::InstallExperimentalNativeFunctions() { INSTALL_NATIVE(JSFunction, "RunMicrotasks", run_microtasks); - INSTALL_NATIVE(JSFunction, "EnqueueExternalMicrotask", - enqueue_external_microtask); + INSTALL_NATIVE(JSFunction, "EnqueueMicrotask", enqueue_microtask); if (FLAG_harmony_proxies) { INSTALL_NATIVE(JSFunction, "DerivedHasTrap", derived_has_trap); @@ -1640,30 +1613,27 @@ Handle<JSFunction> Genesis::InstallInternalArray( Accessors::FunctionSetPrototype(array_function, prototype); InternalArrayConstructorStub internal_array_constructor_stub(isolate()); - Handle<Code> code = internal_array_constructor_stub.GetCode(isolate()); + Handle<Code> code = internal_array_constructor_stub.GetCode(); array_function->shared()->set_construct_stub(*code); array_function->shared()->DontAdaptArguments(); Handle<Map> original_map(array_function->initial_map()); - Handle<Map> initial_map = factory()->CopyMap(original_map); + Handle<Map> initial_map = Map::Copy(original_map); initial_map->set_elements_kind(elements_kind); array_function->set_initial_map(*initial_map); // Make "length" magic on instances. - Handle<DescriptorArray> array_descriptors( - factory()->NewDescriptorArray(0, 1)); - DescriptorArray::WhitenessWitness witness(*array_descriptors); + Map::EnsureDescriptorSlack(initial_map, 1); - Handle<Foreign> array_length(factory()->NewForeign( - &Accessors::ArrayLength)); PropertyAttributes attribs = static_cast<PropertyAttributes>( DONT_ENUM | DONT_DELETE); - initial_map->set_instance_descriptors(*array_descriptors); + Handle<AccessorInfo> array_length = + Accessors::ArrayLengthInfo(isolate(), attribs); { // Add length. 
CallbacksDescriptor d( - *factory()->length_string(), *array_length, attribs); - array_function->initial_map()->AppendDescriptor(&d, witness); + Handle<Name>(Name::cast(array_length->name())), array_length, attribs); + array_function->initial_map()->AppendDescriptor(&d); } return array_function; @@ -1708,23 +1678,20 @@ bool Genesis::InstallNatives() { Handle<String> global_string = factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("global")); Handle<Object> global_obj(native_context()->global_object(), isolate()); - CHECK_NOT_EMPTY_HANDLE(isolate(), - JSObject::SetLocalPropertyIgnoreAttributes( - builtins, global_string, global_obj, attributes)); + JSObject::SetLocalPropertyIgnoreAttributes( + builtins, global_string, global_obj, attributes).Check(); Handle<String> builtins_string = factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("builtins")); - CHECK_NOT_EMPTY_HANDLE(isolate(), - JSObject::SetLocalPropertyIgnoreAttributes( - builtins, builtins_string, builtins, attributes)); + JSObject::SetLocalPropertyIgnoreAttributes( + builtins, builtins_string, builtins, attributes).Check(); // Set up the reference from the global object to the builtins object. JSGlobalObject::cast(native_context()->global_object())-> set_builtins(*builtins); // Create a bridge function that has context in the native context. - Handle<JSFunction> bridge = - factory()->NewFunction(factory()->empty_string(), - factory()->undefined_value()); + Handle<JSFunction> bridge = factory()->NewFunctionWithPrototype( + factory()->empty_string(), factory()->undefined_value()); ASSERT(bridge->context() == *isolate()->native_context()); // Allocate the builtins context. @@ -1746,132 +1713,111 @@ bool Genesis::InstallNatives() { native_context()->set_script_function(*script_fun); Handle<Map> script_map = Handle<Map>(script_fun->initial_map()); + Map::EnsureDescriptorSlack(script_map, 13); - Handle<DescriptorArray> script_descriptors( - factory()->NewDescriptorArray(0, 13)); - DescriptorArray::WhitenessWitness witness(*script_descriptors); - - Handle<Foreign> script_source( - factory()->NewForeign(&Accessors::ScriptSource)); - Handle<Foreign> script_name(factory()->NewForeign(&Accessors::ScriptName)); - Handle<String> id_string(factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("id"))); - Handle<Foreign> script_id(factory()->NewForeign(&Accessors::ScriptId)); - Handle<String> line_offset_string( - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("line_offset"))); - Handle<Foreign> script_line_offset( - factory()->NewForeign(&Accessors::ScriptLineOffset)); - Handle<String> column_offset_string( - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("column_offset"))); - Handle<Foreign> script_column_offset( - factory()->NewForeign(&Accessors::ScriptColumnOffset)); - Handle<String> type_string(factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("type"))); - Handle<Foreign> script_type(factory()->NewForeign(&Accessors::ScriptType)); - Handle<String> compilation_type_string( - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("compilation_type"))); - Handle<Foreign> script_compilation_type( - factory()->NewForeign(&Accessors::ScriptCompilationType)); - Handle<String> line_ends_string(factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("line_ends"))); - Handle<Foreign> script_line_ends( - factory()->NewForeign(&Accessors::ScriptLineEnds)); - Handle<String> context_data_string( - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("context_data"))); - Handle<Foreign> 
script_context_data( - factory()->NewForeign(&Accessors::ScriptContextData)); - Handle<String> eval_from_script_string( - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("eval_from_script"))); - Handle<Foreign> script_eval_from_script( - factory()->NewForeign(&Accessors::ScriptEvalFromScript)); - Handle<String> eval_from_script_position_string( - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("eval_from_script_position"))); - Handle<Foreign> script_eval_from_script_position( - factory()->NewForeign(&Accessors::ScriptEvalFromScriptPosition)); - Handle<String> eval_from_function_name_string( - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("eval_from_function_name"))); - Handle<Foreign> script_eval_from_function_name( - factory()->NewForeign(&Accessors::ScriptEvalFromFunctionName)); PropertyAttributes attribs = static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY); - script_map->set_instance_descriptors(*script_descriptors); + Handle<AccessorInfo> script_column = + Accessors::ScriptColumnOffsetInfo(isolate(), attribs); { - CallbacksDescriptor d( - *factory()->source_string(), *script_source, attribs); - script_map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(script_column->name())), + script_column, attribs); + script_map->AppendDescriptor(&d); } + Handle<AccessorInfo> script_id = + Accessors::ScriptIdInfo(isolate(), attribs); { - CallbacksDescriptor d(*factory()->name_string(), *script_name, attribs); - script_map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(script_id->name())), + script_id, attribs); + script_map->AppendDescriptor(&d); } + + Handle<AccessorInfo> script_name = + Accessors::ScriptNameInfo(isolate(), attribs); { - CallbacksDescriptor d(*id_string, *script_id, attribs); - script_map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(script_name->name())), + script_name, attribs); + script_map->AppendDescriptor(&d); } + Handle<AccessorInfo> script_line = + Accessors::ScriptLineOffsetInfo(isolate(), attribs); { - CallbacksDescriptor d(*line_offset_string, *script_line_offset, attribs); - script_map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(script_line->name())), + script_line, attribs); + script_map->AppendDescriptor(&d); } + Handle<AccessorInfo> script_source = + Accessors::ScriptSourceInfo(isolate(), attribs); { - CallbacksDescriptor d( - *column_offset_string, *script_column_offset, attribs); - script_map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(script_source->name())), + script_source, attribs); + script_map->AppendDescriptor(&d); } + Handle<AccessorInfo> script_type = + Accessors::ScriptTypeInfo(isolate(), attribs); { - CallbacksDescriptor d(*type_string, *script_type, attribs); - script_map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(script_type->name())), + script_type, attribs); + script_map->AppendDescriptor(&d); } + Handle<AccessorInfo> script_compilation_type = + Accessors::ScriptCompilationTypeInfo(isolate(), attribs); { CallbacksDescriptor d( - *compilation_type_string, *script_compilation_type, attribs); - script_map->AppendDescriptor(&d, witness); + Handle<Name>(Name::cast(script_compilation_type->name())), + script_compilation_type, attribs); + script_map->AppendDescriptor(&d); } + Handle<AccessorInfo> script_line_ends = + Accessors::ScriptLineEndsInfo(isolate(), attribs); { - CallbacksDescriptor 
d(*line_ends_string, *script_line_ends, attribs); - script_map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(script_line_ends->name())), + script_line_ends, attribs); + script_map->AppendDescriptor(&d); } + Handle<AccessorInfo> script_context_data = + Accessors::ScriptContextDataInfo(isolate(), attribs); { CallbacksDescriptor d( - *context_data_string, *script_context_data, attribs); - script_map->AppendDescriptor(&d, witness); + Handle<Name>(Name::cast(script_context_data->name())), + script_context_data, attribs); + script_map->AppendDescriptor(&d); } + Handle<AccessorInfo> script_eval_from_script = + Accessors::ScriptEvalFromScriptInfo(isolate(), attribs); { CallbacksDescriptor d( - *eval_from_script_string, *script_eval_from_script, attribs); - script_map->AppendDescriptor(&d, witness); + Handle<Name>(Name::cast(script_eval_from_script->name())), + script_eval_from_script, attribs); + script_map->AppendDescriptor(&d); } + Handle<AccessorInfo> script_eval_from_script_position = + Accessors::ScriptEvalFromScriptPositionInfo(isolate(), attribs); { CallbacksDescriptor d( - *eval_from_script_position_string, - *script_eval_from_script_position, - attribs); - script_map->AppendDescriptor(&d, witness); + Handle<Name>(Name::cast(script_eval_from_script_position->name())), + script_eval_from_script_position, attribs); + script_map->AppendDescriptor(&d); } + Handle<AccessorInfo> script_eval_from_function_name = + Accessors::ScriptEvalFromFunctionNameInfo(isolate(), attribs); { CallbacksDescriptor d( - *eval_from_function_name_string, - *script_eval_from_function_name, - attribs); - script_map->AppendDescriptor(&d, witness); + Handle<Name>(Name::cast(script_eval_from_function_name->name())), + script_eval_from_function_name, attribs); + script_map->AppendDescriptor(&d); } // Allocate the empty script. @@ -1938,8 +1884,8 @@ bool Genesis::InstallNatives() { // Install Function.prototype.call and apply. { Handle<String> key = factory()->function_class_string(); Handle<JSFunction> function = - Handle<JSFunction>::cast( - GetProperty(isolate(), isolate()->global_object(), key)); + Handle<JSFunction>::cast(Object::GetProperty( + isolate()->global_object(), key).ToHandleChecked()); Handle<JSObject> proto = Handle<JSObject>(JSObject::cast(function->instance_prototype())); @@ -1991,38 +1937,36 @@ bool Genesis::InstallNatives() { initial_map->set_prototype(*array_prototype); // Update map with length accessor from Array and add "index" and "input". 
- Handle<DescriptorArray> reresult_descriptors = - factory()->NewDescriptorArray(0, 3); - DescriptorArray::WhitenessWitness witness(*reresult_descriptors); - initial_map->set_instance_descriptors(*reresult_descriptors); + Map::EnsureDescriptorSlack(initial_map, 3); { JSFunction* array_function = native_context()->array_function(); Handle<DescriptorArray> array_descriptors( array_function->initial_map()->instance_descriptors()); - String* length = heap()->length_string(); + Handle<String> length = factory()->length_string(); int old = array_descriptors->SearchWithCache( - length, array_function->initial_map()); + *length, array_function->initial_map()); ASSERT(old != DescriptorArray::kNotFound); CallbacksDescriptor desc(length, - array_descriptors->GetValue(old), + handle(array_descriptors->GetValue(old), + isolate()), array_descriptors->GetDetails(old).attributes()); - initial_map->AppendDescriptor(&desc, witness); + initial_map->AppendDescriptor(&desc); } { - FieldDescriptor index_field(heap()->index_string(), + FieldDescriptor index_field(factory()->index_string(), JSRegExpResult::kIndexIndex, NONE, Representation::Tagged()); - initial_map->AppendDescriptor(&index_field, witness); + initial_map->AppendDescriptor(&index_field); } { - FieldDescriptor input_field(heap()->input_string(), + FieldDescriptor input_field(factory()->input_string(), JSRegExpResult::kInputIndex, NONE, Representation::Tagged()); - initial_map->AppendDescriptor(&input_field, witness); + initial_map->AppendDescriptor(&input_field); } initial_map->set_inobject_properties(2); @@ -2033,7 +1977,7 @@ bool Genesis::InstallNatives() { } #ifdef VERIFY_HEAP - builtins->Verify(); + builtins->ObjectVerify(); #endif return true; @@ -2076,8 +2020,8 @@ static Handle<JSObject> ResolveBuiltinIdHolder( Handle<GlobalObject> global(native_context->global_object()); const char* period_pos = strchr(holder_expr, '.'); if (period_pos == NULL) { - return Handle<JSObject>::cast(GetProperty( - isolate, global, factory->InternalizeUtf8String(holder_expr))); + return Handle<JSObject>::cast(Object::GetPropertyOrElement( + global, factory->InternalizeUtf8String(holder_expr)).ToHandleChecked()); } ASSERT_EQ(".prototype", period_pos); Vector<const char> property(holder_expr, @@ -2085,7 +2029,7 @@ static Handle<JSObject> ResolveBuiltinIdHolder( Handle<String> property_string = factory->InternalizeUtf8String(property); ASSERT(!property_string.is_null()); Handle<JSFunction> function = Handle<JSFunction>::cast( - GetProperty(isolate, global, property_string)); + Object::GetProperty(global, property_string).ToHandleChecked()); return Handle<JSObject>(JSObject::cast(function->prototype())); } @@ -2093,10 +2037,10 @@ static Handle<JSObject> ResolveBuiltinIdHolder( static void InstallBuiltinFunctionId(Handle<JSObject> holder, const char* function_name, BuiltinFunctionId id) { - Factory* factory = holder->GetIsolate()->factory(); - Handle<String> name = factory->InternalizeUtf8String(function_name); - Object* function_object = holder->GetProperty(*name)->ToObjectUnchecked(); - Handle<JSFunction> function(JSFunction::cast(function_object)); + Isolate* isolate = holder->GetIsolate(); + Handle<Object> function_object = + Object::GetProperty(isolate, holder, function_name).ToHandleChecked(); + Handle<JSFunction> function = Handle<JSFunction>::cast(function_object); function->shared()->set_function_data(Smi::FromInt(id)); } @@ -2169,9 +2113,8 @@ void Genesis::InstallJSFunctionResultCaches() { void Genesis::InitializeNormalizedMapCaches() { - Handle<FixedArray> 
array( - factory()->NewFixedArray(NormalizedMapCache::kEntries, TENURED)); - native_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array)); + Handle<NormalizedMapCache> cache = NormalizedMapCache::New(isolate()); + native_context()->set_normalized_map_cache(*cache); } @@ -2195,23 +2138,27 @@ bool Genesis::InstallSpecialObjects(Handle<Context> native_context) { if (FLAG_expose_natives_as != NULL && strlen(FLAG_expose_natives_as) != 0) { Handle<String> natives = factory->InternalizeUtf8String(FLAG_expose_natives_as); - JSObject::SetLocalPropertyIgnoreAttributes( - global, natives, Handle<JSObject>(global->builtins()), DONT_ENUM); - if (isolate->has_pending_exception()) return false; + RETURN_ON_EXCEPTION_VALUE( + isolate, + JSObject::SetLocalPropertyIgnoreAttributes( + global, natives, Handle<JSObject>(global->builtins()), DONT_ENUM), + false); } - Handle<Object> Error = GetProperty(global, "Error"); + Handle<Object> Error = Object::GetProperty( + isolate, global, "Error").ToHandleChecked(); if (Error->IsJSObject()) { Handle<String> name = factory->InternalizeOneByteString( STATIC_ASCII_VECTOR("stackTraceLimit")); Handle<Smi> stack_trace_limit( Smi::FromInt(FLAG_stack_trace_limit), isolate); - JSObject::SetLocalPropertyIgnoreAttributes( - Handle<JSObject>::cast(Error), name, stack_trace_limit, NONE); - if (isolate->has_pending_exception()) return false; + RETURN_ON_EXCEPTION_VALUE( + isolate, + JSObject::SetLocalPropertyIgnoreAttributes( + Handle<JSObject>::cast(Error), name, stack_trace_limit, NONE), + false); } -#ifdef ENABLE_DEBUGGER_SUPPORT // Expose the debug global object in global if a name for it is specified. if (FLAG_expose_debug_as != NULL && strlen(FLAG_expose_debug_as) != 0) { Debug* debug = isolate->debug(); @@ -2228,11 +2175,12 @@ bool Genesis::InstallSpecialObjects(Handle<Context> native_context) { factory->InternalizeUtf8String(FLAG_expose_debug_as); Handle<Object> global_proxy( debug->debug_context()->global_proxy(), isolate); - JSObject::SetLocalPropertyIgnoreAttributes( - global, debug_string, global_proxy, DONT_ENUM); - if (isolate->has_pending_exception()) return false; + RETURN_ON_EXCEPTION_VALUE( + isolate, + JSObject::SetLocalPropertyIgnoreAttributes( + global, debug_string, global_proxy, DONT_ENUM), + false); } -#endif return true; } @@ -2242,12 +2190,7 @@ static uint32_t Hash(RegisteredExtension* extension) { } -static bool MatchRegisteredExtensions(void* key1, void* key2) { - return key1 == key2; -} - -Genesis::ExtensionStates::ExtensionStates() - : map_(MatchRegisteredExtensions, 8) { } +Genesis::ExtensionStates::ExtensionStates() : map_(HashMap::PointersMatch, 8) {} Genesis::ExtensionTraversalState Genesis::ExtensionStates::get_state( RegisteredExtension* extension) { @@ -2351,10 +2294,10 @@ bool Genesis::InstallExtension(Isolate* isolate, return false; } } - Handle<String> source_code = - isolate->factory()->NewExternalStringFromAscii(extension->source()); // We do not expect this to throw an exception. Change this if it does. 
- CHECK_NOT_EMPTY_HANDLE(isolate, source_code); + Handle<String> source_code = + isolate->factory()->NewExternalStringFromAscii( + extension->source()).ToHandleChecked(); bool result = CompileScriptCached(isolate, CStrVector(extension->name()), source_code, @@ -2382,11 +2325,9 @@ bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) { HandleScope scope(isolate()); for (int i = 0; i < Builtins::NumberOfJavaScriptBuiltins(); i++) { Builtins::JavaScript id = static_cast<Builtins::JavaScript>(i); - Handle<String> name = - factory()->InternalizeUtf8String(Builtins::GetName(id)); - Object* function_object = builtins->GetPropertyNoExceptionThrown(*name); - Handle<JSFunction> function - = Handle<JSFunction>(JSFunction::cast(function_object)); + Handle<Object> function_object = Object::GetProperty( + isolate(), builtins, Builtins::GetName(id)).ToHandleChecked(); + Handle<JSFunction> function = Handle<JSFunction>::cast(function_object); builtins->set_javascript_builtin(id, *function); if (!Compiler::EnsureCompiled(function, CLEAR_EXCEPTION)) { return false; @@ -2435,10 +2376,10 @@ bool Genesis::ConfigureApiObject(Handle<JSObject> object, ASSERT(FunctionTemplateInfo::cast(object_template->constructor()) ->IsTemplateFor(object->map()));; - bool pending_exception = false; - Handle<JSObject> obj = - Execution::InstantiateObject(object_template, &pending_exception); - if (pending_exception) { + MaybeHandle<JSObject> maybe_obj = + Execution::InstantiateObject(object_template); + Handle<JSObject> obj; + if (!maybe_obj.ToHandle(&obj)) { ASSERT(isolate()->has_pending_exception()); isolate()->clear_pending_exception(); return false; @@ -2463,29 +2404,27 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from, ASSERT(!descs->GetDetails(i).representation().IsDouble()); Handle<Object> value = Handle<Object>(from->RawFastPropertyAt(index), isolate()); - CHECK_NOT_EMPTY_HANDLE(isolate(), - JSObject::SetLocalPropertyIgnoreAttributes( - to, key, value, details.attributes())); + JSObject::SetLocalPropertyIgnoreAttributes( + to, key, value, details.attributes()).Check(); break; } case CONSTANT: { HandleScope inner(isolate()); Handle<Name> key = Handle<Name>(descs->GetKey(i)); Handle<Object> constant(descs->GetConstant(i), isolate()); - CHECK_NOT_EMPTY_HANDLE(isolate(), - JSObject::SetLocalPropertyIgnoreAttributes( - to, key, constant, details.attributes())); + JSObject::SetLocalPropertyIgnoreAttributes( + to, key, constant, details.attributes()).Check(); break; } case CALLBACKS: { LookupResult result(isolate()); - to->LocalLookup(descs->GetKey(i), &result); + Handle<Name> key(Name::cast(descs->GetKey(i)), isolate()); + to->LocalLookup(key, &result); // If the property is already there we skip it if (result.IsFound()) continue; HandleScope inner(isolate()); ASSERT(!to->HasFastProperties()); // Add to dictionary. - Handle<Name> key = Handle<Name>(descs->GetKey(i)); Handle<Object> callbacks(descs->GetCallbacksObject(i), isolate()); PropertyDetails d = PropertyDetails( details.attributes(), CALLBACKS, i + 1); @@ -2496,7 +2435,6 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from, // Do not occur since the from object has fast properties. case HANDLER: case INTERCEPTOR: - case TRANSITION: case NONEXISTENT: // No element in instance descriptors have proxy or interceptor type. UNREACHABLE(); @@ -2513,10 +2451,10 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from, ASSERT(raw_key->IsName()); // If the property is already there we skip it. 
LookupResult result(isolate()); - to->LocalLookup(Name::cast(raw_key), &result); + Handle<Name> key(Name::cast(raw_key)); + to->LocalLookup(key, &result); if (result.IsFound()) continue; // Set the property. - Handle<Name> key = Handle<Name>(Name::cast(raw_key)); Handle<Object> value = Handle<Object>(properties->ValueAt(i), isolate()); ASSERT(!value->IsCell()); @@ -2525,9 +2463,8 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from, isolate()); } PropertyDetails details = properties->DetailsAt(i); - CHECK_NOT_EMPTY_HANDLE(isolate(), - JSObject::SetLocalPropertyIgnoreAttributes( - to, key, value, details.attributes())); + JSObject::SetLocalPropertyIgnoreAttributes( + to, key, value, details.attributes()).Check(); } } } @@ -2555,7 +2492,7 @@ void Genesis::TransferObject(Handle<JSObject> from, Handle<JSObject> to) { // Transfer the prototype (new map is needed). Handle<Map> old_to_map = Handle<Map>(to->map()); - Handle<Map> new_to_map = factory()->CopyMap(old_to_map); + Handle<Map> new_to_map = Map::Copy(old_to_map); new_to_map->set_prototype(from->map()->prototype()); to->set_map(*new_to_map); } @@ -2578,20 +2515,23 @@ void Genesis::MakeFunctionInstancePrototypeWritable() { class NoTrackDoubleFieldsForSerializerScope { public: - NoTrackDoubleFieldsForSerializerScope() : flag_(FLAG_track_double_fields) { - if (Serializer::enabled()) { + explicit NoTrackDoubleFieldsForSerializerScope(Isolate* isolate) + : isolate_(isolate), flag_(FLAG_track_double_fields) { + if (Serializer::enabled(isolate)) { // Disable tracking double fields because heap numbers treated as // immutable by the serializer. FLAG_track_double_fields = false; } } + ~NoTrackDoubleFieldsForSerializerScope() { - if (Serializer::enabled()) { + if (Serializer::enabled(isolate_)) { FLAG_track_double_fields = flag_; } } private: + Isolate* isolate_; bool flag_; }; @@ -2602,7 +2542,7 @@ Genesis::Genesis(Isolate* isolate, v8::ExtensionConfiguration* extensions) : isolate_(isolate), active_(isolate->bootstrapper()) { - NoTrackDoubleFieldsForSerializerScope disable_double_tracking_for_serializer; + NoTrackDoubleFieldsForSerializerScope disable_scope(isolate); result_ = Handle<Context>::null(); // If V8 cannot be initialized, just return. if (!V8::Initialize(NULL)) return; @@ -2668,7 +2608,7 @@ Genesis::Genesis(Isolate* isolate, // We can't (de-)serialize typed arrays currently, but we are lucky: The state // of the random number generator needs no initialization during snapshot // creation time and we don't need trigonometric functions then. - if (!Serializer::enabled()) { + if (!Serializer::enabled(isolate)) { // Initially seed the per-context random number generator using the // per-isolate random number generator. const int num_elems = 2; @@ -2684,11 +2624,11 @@ Genesis::Genesis(Isolate* isolate, Utils::OpenHandle(*buffer)->set_should_be_freed(true); v8::Local<v8::Uint32Array> ta = v8::Uint32Array::New(buffer, 0, num_elems); Handle<JSBuiltinsObject> builtins(native_context()->builtins()); - ForceSetProperty(builtins, - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("rngstate")), - Utils::OpenHandle(*ta), - NONE); + Runtime::ForceSetObjectProperty(builtins, + factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("rngstate")), + Utils::OpenHandle(*ta), + NONE).Assert(); // Initialize trigonometric lookup tables and constants. 
const int table_num_bytes = TrigonometricLookupTable::table_num_bytes(); @@ -2703,28 +2643,31 @@ Genesis::Genesis(Isolate* isolate, v8::Local<v8::Float64Array> cos_table = v8::Float64Array::New( cos_buffer, 0, TrigonometricLookupTable::table_size()); - ForceSetProperty(builtins, - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("kSinTable")), - Utils::OpenHandle(*sin_table), - NONE); - ForceSetProperty(builtins, - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("kCosXIntervalTable")), - Utils::OpenHandle(*cos_table), - NONE); - ForceSetProperty(builtins, - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("kSamples")), - factory()->NewHeapNumber( - TrigonometricLookupTable::samples()), - NONE); - ForceSetProperty(builtins, - factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("kIndexConvert")), - factory()->NewHeapNumber( - TrigonometricLookupTable::samples_over_pi_half()), - NONE); + Runtime::ForceSetObjectProperty(builtins, + factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("kSinTable")), + Utils::OpenHandle(*sin_table), + NONE).Assert(); + Runtime::ForceSetObjectProperty( + builtins, + factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("kCosXIntervalTable")), + Utils::OpenHandle(*cos_table), + NONE).Assert(); + Runtime::ForceSetObjectProperty( + builtins, + factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("kSamples")), + factory()->NewHeapNumber( + TrigonometricLookupTable::samples()), + NONE).Assert(); + Runtime::ForceSetObjectProperty( + builtins, + factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("kIndexConvert")), + factory()->NewHeapNumber( + TrigonometricLookupTable::samples_over_pi_half()), + NONE).Assert(); } result_ = native_context(); diff --git a/deps/v8/src/bootstrapper.h b/deps/v8/src/bootstrapper.h index e683a45f0..f6fcd02ef 100644 --- a/deps/v8/src/bootstrapper.h +++ b/deps/v8/src/bootstrapper.h @@ -1,46 +1,21 @@ -// Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_BOOTSTRAPPER_H_ #define V8_BOOTSTRAPPER_H_ -#include "allocation.h" +#include "factory.h" namespace v8 { namespace internal { - // A SourceCodeCache uses a FixedArray to store pairs of // (AsciiString*, JSFunction*), mapping names of native code files // (runtime.js, etc.) to precompiled functions. Instead of mapping // names to functions it might make sense to let the JS2C tool // generate an index for each native JS file. -class SourceCodeCache BASE_EMBEDDED { +class SourceCodeCache V8_FINAL BASE_EMBEDDED { public: explicit SourceCodeCache(Script::Type type): type_(type), cache_(NULL) { } @@ -72,7 +47,8 @@ class SourceCodeCache BASE_EMBEDDED { Handle<FixedArray> new_array = factory->NewFixedArray(length + 2, TENURED); cache_->CopyTo(0, *new_array, 0, cache_->length()); cache_ = *new_array; - Handle<String> str = factory->NewStringFromAscii(name, TENURED); + Handle<String> str = + factory->NewStringFromAscii(name, TENURED).ToHandleChecked(); ASSERT(!str.is_null()); cache_->set(length, *str); cache_->set(length + 1, *shared); @@ -88,7 +64,7 @@ class SourceCodeCache BASE_EMBEDDED { // The Bootstrapper is the public interface for creating a JavaScript global // context. -class Bootstrapper { +class Bootstrapper V8_FINAL { public: static void InitializeOncePerProcess(); static void TearDownExtensions(); @@ -158,7 +134,7 @@ class Bootstrapper { }; -class BootstrapperActive BASE_EMBEDDED { +class BootstrapperActive V8_FINAL BASE_EMBEDDED { public: explicit BootstrapperActive(Bootstrapper* bootstrapper) : bootstrapper_(bootstrapper) { @@ -176,20 +152,15 @@ class BootstrapperActive BASE_EMBEDDED { }; -class NativesExternalStringResource +class NativesExternalStringResource V8_FINAL : public v8::String::ExternalAsciiStringResource { public: NativesExternalStringResource(Bootstrapper* bootstrapper, const char* source, size_t length); + virtual const char* data() const V8_OVERRIDE { return data_; } + virtual size_t length() const V8_OVERRIDE { return length_; } - const char* data() const { return data_; } - - size_t length() const { return length_; } private: const char* data_; size_t length_; diff --git a/deps/v8/src/builtins.cc b/deps/v8/src/builtins.cc index 689e845ba..d0c1a446a 100644 --- a/deps/v8/src/builtins.cc +++ b/deps/v8/src/builtins.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -127,28 +104,28 @@ BUILTIN_LIST_C(DEF_ARG_TYPE) #ifdef DEBUG #define BUILTIN(name) \ - MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \ + MUST_USE_RESULT static Object* Builtin_Impl_##name( \ name##ArgumentsType args, Isolate* isolate); \ - MUST_USE_RESULT static MaybeObject* Builtin_##name( \ + MUST_USE_RESULT static Object* Builtin_##name( \ int args_length, Object** args_object, Isolate* isolate) { \ name##ArgumentsType args(args_length, args_object); \ args.Verify(); \ return Builtin_Impl_##name(args, isolate); \ } \ - MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \ + MUST_USE_RESULT static Object* Builtin_Impl_##name( \ name##ArgumentsType args, Isolate* isolate) #else // For release mode. #define BUILTIN(name) \ - static MaybeObject* Builtin_impl##name( \ + static Object* Builtin_impl##name( \ name##ArgumentsType args, Isolate* isolate); \ - static MaybeObject* Builtin_##name( \ + static Object* Builtin_##name( \ int args_length, Object** args_object, Isolate* isolate) { \ name##ArgumentsType args(args_length, args_object); \ return Builtin_impl##name(args, isolate); \ } \ - static MaybeObject* Builtin_impl##name( \ + static Object* Builtin_impl##name( \ name##ArgumentsType args, Isolate* isolate) #endif @@ -207,22 +184,11 @@ static void MoveDoubleElements(FixedDoubleArray* dst, } -static void FillWithHoles(Heap* heap, FixedArray* dst, int from, int to) { - ASSERT(dst->map() != heap->fixed_cow_array_map()); - MemsetPointer(dst->data_start() + from, heap->the_hole_value(), to - from); -} - - -static void FillWithHoles(FixedDoubleArray* dst, int from, int to) { - for (int i = from; i < to; i++) { - dst->set_the_hole(i); - } -} - - static FixedArrayBase* LeftTrimFixedArray(Heap* heap, FixedArrayBase* elms, int to_trim) { + ASSERT(heap->CanMoveObjectStart(elms)); + Map* map = elms->map(); int entry_size; if (elms->IsFixedArray()) { @@ -259,6 +225,8 @@ static FixedArrayBase* LeftTrimFixedArray(Heap* heap, // Technically in new space this write might be omitted (except for // debug mode which iterates through the heap), but to play safer // we still do it. + // Since left trimming is only performed on pages which are not concurrently + // swept creating a filler object does not require synchronization. heap->CreateFillerObjectAt(elms->address(), to_trim * entry_size); int new_start_index = to_trim * (entry_size / kPointerSize); @@ -287,6 +255,7 @@ static FixedArrayBase* LeftTrimFixedArray(Heap* heap, static bool ArrayPrototypeHasNoElements(Heap* heap, Context* native_context, JSObject* array_proto) { + DisallowHeapAllocation no_gc; // This method depends on non writability of Object and Array prototype // fields. if (array_proto->elements() != heap->empty_fixed_array()) return false; @@ -302,16 +271,21 @@ static bool ArrayPrototypeHasNoElements(Heap* heap, // Returns empty handle if not applicable. 
MUST_USE_RESULT -static inline Handle<FixedArrayBase> EnsureJSArrayWithWritableFastElements( +static inline MaybeHandle<FixedArrayBase> EnsureJSArrayWithWritableFastElements( Isolate* isolate, Handle<Object> receiver, Arguments* args, int first_added_arg) { - if (!receiver->IsJSArray()) return Handle<FixedArrayBase>::null(); + if (!receiver->IsJSArray()) return MaybeHandle<FixedArrayBase>(); Handle<JSArray> array = Handle<JSArray>::cast(receiver); - if (array->map()->is_observed()) return Handle<FixedArrayBase>::null(); - if (!array->map()->is_extensible()) return Handle<FixedArrayBase>::null(); - Handle<FixedArrayBase> elms(array->elements()); + // If there may be elements accessors in the prototype chain, the fast path + // cannot be used if there are arguments to add to the array. + if (args != NULL && array->map()->DictionaryElementsInPrototypeChainOnly()) { + return MaybeHandle<FixedArrayBase>(); + } + if (array->map()->is_observed()) return MaybeHandle<FixedArrayBase>(); + if (!array->map()->is_extensible()) return MaybeHandle<FixedArrayBase>(); + Handle<FixedArrayBase> elms(array->elements(), isolate); Heap* heap = isolate->heap(); Map* map = elms->map(); if (map == heap->fixed_array_map()) { @@ -322,42 +296,45 @@ static inline Handle<FixedArrayBase> EnsureJSArrayWithWritableFastElements( } else if (map == heap->fixed_double_array_map()) { if (args == NULL) return elms; } else { - return Handle<FixedArrayBase>::null(); + return MaybeHandle<FixedArrayBase>(); } // Need to ensure that the arguments passed in args can be contained in // the array. int args_length = args->length(); - if (first_added_arg >= args_length) return handle(array->elements()); + if (first_added_arg >= args_length) return handle(array->elements(), isolate); ElementsKind origin_kind = array->map()->elements_kind(); ASSERT(!IsFastObjectElementsKind(origin_kind)); ElementsKind target_kind = origin_kind; - int arg_count = args->length() - first_added_arg; - Object** arguments = args->arguments() - first_added_arg - (arg_count - 1); - for (int i = 0; i < arg_count; i++) { - Object* arg = arguments[i]; - if (arg->IsHeapObject()) { - if (arg->IsHeapNumber()) { - target_kind = FAST_DOUBLE_ELEMENTS; - } else { - target_kind = FAST_ELEMENTS; - break; + { + DisallowHeapAllocation no_gc; + int arg_count = args->length() - first_added_arg; + Object** arguments = args->arguments() - first_added_arg - (arg_count - 1); + for (int i = 0; i < arg_count; i++) { + Object* arg = arguments[i]; + if (arg->IsHeapObject()) { + if (arg->IsHeapNumber()) { + target_kind = FAST_DOUBLE_ELEMENTS; + } else { + target_kind = FAST_ELEMENTS; + break; + } } } } if (target_kind != origin_kind) { JSObject::TransitionElementsKind(array, target_kind); - return handle(array->elements()); + return handle(array->elements(), isolate); } return elms; } -// TODO(ishell): Handlify when all Array* builtins are handlified.
static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap, JSArray* receiver) { if (!FLAG_clever_optimizations) return false; + DisallowHeapAllocation no_gc; Context* native_context = heap->isolate()->context()->native_context(); JSObject* array_proto = JSObject::cast(native_context->array_function()->prototype()); @@ -366,29 +343,30 @@ static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap, } -MUST_USE_RESULT static MaybeObject* CallJsBuiltin( +MUST_USE_RESULT static Object* CallJsBuiltin( Isolate* isolate, const char* name, BuiltinArguments<NO_EXTRA_ARGUMENTS> args) { HandleScope handleScope(isolate); - Handle<Object> js_builtin = - GetProperty(Handle<JSObject>(isolate->native_context()->builtins()), - name); + Handle<Object> js_builtin = Object::GetProperty( + isolate, + handle(isolate->native_context()->builtins(), isolate), + name).ToHandleChecked(); Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin); int argc = args.length() - 1; ScopedVector<Handle<Object> > argv(argc); for (int i = 0; i < argc; ++i) { argv[i] = args.at<Object>(i + 1); } - bool pending_exception; - Handle<Object> result = Execution::Call(isolate, - function, - args.receiver(), - argc, - argv.start(), - &pending_exception); - if (pending_exception) return Failure::Exception(); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Execution::Call(isolate, + function, + args.receiver(), + argc, + argv.start())); return *result; } @@ -396,9 +374,12 @@ MUST_USE_RESULT static MaybeObject* CallJsBuiltin( BUILTIN(ArrayPush) { HandleScope scope(isolate); Handle<Object> receiver = args.receiver(); - Handle<FixedArrayBase> elms_obj = + MaybeHandle<FixedArrayBase> maybe_elms_obj = EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1); - if (elms_obj.is_null()) return CallJsBuiltin(isolate, "ArrayPush", args); + Handle<FixedArrayBase> elms_obj; + if (!maybe_elms_obj.ToHandle(&elms_obj)) { + return CallJsBuiltin(isolate, "ArrayPush", args); + } Handle<JSArray> array = Handle<JSArray>::cast(receiver); ASSERT(!array->map()->is_observed()); @@ -427,8 +408,8 @@ BUILTIN(ArrayPush) { ElementsAccessor* accessor = array->GetElementsAccessor(); accessor->CopyElements( - Handle<JSObject>::null(), 0, kind, new_elms, 0, - ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj); + elms_obj, 0, kind, new_elms, 0, + ElementsAccessor::kCopyToEndAndInitializeToHole); elms = new_elms; } @@ -466,12 +447,15 @@ BUILTIN(ArrayPush) { if (new_length > elms_len) { // New backing storage is needed. int capacity = new_length + (new_length >> 1) + 16; - new_elms = isolate->factory()->NewFixedDoubleArray(capacity); + // Create new backing store; since capacity > 0, we can + // safely cast to FixedDoubleArray. + new_elms = Handle<FixedDoubleArray>::cast( + isolate->factory()->NewFixedDoubleArray(capacity)); ElementsAccessor* accessor = array->GetElementsAccessor(); accessor->CopyElements( - Handle<JSObject>::null(), 0, kind, new_elms, 0, - ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj); + elms_obj, 0, kind, new_elms, 0, + ElementsAccessor::kCopyToEndAndInitializeToHole); } else { // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the @@ -498,24 +482,15 @@ BUILTIN(ArrayPush) { } -// TODO(ishell): Temporary wrapper until handlified. 
-static bool ElementsAccessorHasElementWrapper( - ElementsAccessor* accessor, - Handle<Object> receiver, - Handle<JSObject> holder, - uint32_t key, - Handle<FixedArrayBase> backing_store = Handle<FixedArrayBase>::null()) { - return accessor->HasElement(*receiver, *holder, key, - backing_store.is_null() ? NULL : *backing_store); -} - - BUILTIN(ArrayPop) { HandleScope scope(isolate); Handle<Object> receiver = args.receiver(); - Handle<FixedArrayBase> elms_obj = + MaybeHandle<FixedArrayBase> maybe_elms_obj = EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0); - if (elms_obj.is_null()) return CallJsBuiltin(isolate, "ArrayPop", args); + Handle<FixedArrayBase> elms_obj; + if (!maybe_elms_obj.ToHandle(&elms_obj)) { + return CallJsBuiltin(isolate, "ArrayPop", args); + } Handle<JSArray> array = Handle<JSArray>::cast(receiver); ASSERT(!array->map()->is_observed()); @@ -525,19 +500,14 @@ BUILTIN(ArrayPop) { ElementsAccessor* accessor = array->GetElementsAccessor(); int new_length = len - 1; - Handle<Object> element; - if (ElementsAccessorHasElementWrapper( - accessor, array, array, new_length, elms_obj)) { - element = accessor->Get( - array, array, new_length, elms_obj); - } else { - Handle<Object> proto(array->GetPrototype(), isolate); - element = Object::GetElement(isolate, proto, len - 1); + Handle<Object> element = + accessor->Get(array, array, new_length, elms_obj).ToHandleChecked(); + if (element->IsTheHole()) { + return CallJsBuiltin(isolate, "ArrayPop", args); } - RETURN_IF_EMPTY_HANDLE(isolate, element); - RETURN_IF_EMPTY_HANDLE(isolate, - accessor->SetLength( - array, handle(Smi::FromInt(new_length), isolate))); + RETURN_FAILURE_ON_EXCEPTION( + isolate, + accessor->SetLength(array, handle(Smi::FromInt(new_length), isolate))); return *element; } @@ -546,9 +516,10 @@ BUILTIN(ArrayShift) { HandleScope scope(isolate); Heap* heap = isolate->heap(); Handle<Object> receiver = args.receiver(); - Handle<FixedArrayBase> elms_obj = + MaybeHandle<FixedArrayBase> maybe_elms_obj = EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0); - if (elms_obj.is_null() || + Handle<FixedArrayBase> elms_obj; + if (!maybe_elms_obj.ToHandle(&elms_obj) || !IsJSArrayFastElementMovingAllowed(heap, *Handle<JSArray>::cast(receiver))) { return CallJsBuiltin(isolate, "ArrayShift", args); @@ -561,13 +532,13 @@ BUILTIN(ArrayShift) { // Get first element ElementsAccessor* accessor = array->GetElementsAccessor(); - Handle<Object> first = accessor->Get(receiver, array, 0, elms_obj); - RETURN_IF_EMPTY_HANDLE(isolate, first); + Handle<Object> first = + accessor->Get(array, array, 0, elms_obj).ToHandleChecked(); if (first->IsTheHole()) { - first = isolate->factory()->undefined_value(); + return CallJsBuiltin(isolate, "ArrayShift", args); } - if (!heap->CanMoveObjectStart(*elms_obj)) { + if (heap->CanMoveObjectStart(*elms_obj)) { array->set_elements(LeftTrimFixedArray(heap, *elms_obj, 1)); } else { // Shift the elements. 
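The ArrayShift hunk above, and the ArraySlice/ArraySplice/ArrayConcat hunks that follow, lean heavily on `DisallowHeapAllocation` scopes while raw object pointers are live, with a nested `AllowHeapAllocation` just before each `CallJsBuiltin` bailout. The sketch below imitates that nesting with simplified stand-in classes and a global counter; it is not V8's real (debug-only) implementation.

```cpp
// Sketch of the DisallowHeapAllocation / AllowHeapAllocation scoping used in
// the surrounding hunks. These stand-ins model the nesting behaviour only.
#include <cassert>

namespace sketch {

int g_no_allocation_depth = 0;  // >0 means "GC allocation is forbidden here"

struct DisallowHeapAllocation {
  DisallowHeapAllocation() { ++g_no_allocation_depth; }
  ~DisallowHeapAllocation() { --g_no_allocation_depth; }
};

// Temporarily re-enables allocation inside a no-allocation scope, e.g. right
// before bailing out to a JS builtin that may allocate.
struct AllowHeapAllocation {
  AllowHeapAllocation() : saved_(g_no_allocation_depth) {
    g_no_allocation_depth = 0;
  }
  ~AllowHeapAllocation() { g_no_allocation_depth = saved_; }

 private:
  int saved_;
};

bool AllocationAllowed() { return g_no_allocation_depth == 0; }

}  // namespace sketch

int main() {
  using namespace sketch;
  assert(AllocationAllowed());
  {
    // Raw object pointers are only safe while this guard is live.
    DisallowHeapAllocation no_gc;
    assert(!AllocationAllowed());
    {
      // Slow path: lift the restriction before calling code that allocates,
      // mirroring the "AllowHeapAllocation allow_allocation;" lines placed
      // before each CallJsBuiltin fallback in the hunks below.
      AllowHeapAllocation allow_allocation;
      assert(AllocationAllowed());
    }
    assert(!AllocationAllowed());
  }
  assert(AllocationAllowed());
  return 0;
}
```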
@@ -594,9 +565,10 @@ BUILTIN(ArrayUnshift) { HandleScope scope(isolate); Heap* heap = isolate->heap(); Handle<Object> receiver = args.receiver(); - Handle<FixedArrayBase> elms_obj = + MaybeHandle<FixedArrayBase> maybe_elms_obj = EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0); - if (elms_obj.is_null() || + Handle<FixedArrayBase> elms_obj; + if (!maybe_elms_obj.ToHandle(&elms_obj) || !IsJSArrayFastElementMovingAllowed(heap, *Handle<JSArray>::cast(receiver))) { return CallJsBuiltin(isolate, "ArrayUnshift", args); @@ -627,8 +599,8 @@ BUILTIN(ArrayUnshift) { ElementsKind kind = array->GetElementsKind(); ElementsAccessor* accessor = array->GetElementsAccessor(); accessor->CopyElements( - Handle<JSObject>::null(), 0, kind, new_elms, to_add, - ElementsAccessor::kCopyToEndAndInitializeToHole, elms); + elms, 0, kind, new_elms, to_add, + ElementsAccessor::kCopyToEndAndInitializeToHole); elms = new_elms; array->set_elements(*elms); @@ -654,87 +626,95 @@ BUILTIN(ArraySlice) { HandleScope scope(isolate); Heap* heap = isolate->heap(); Handle<Object> receiver = args.receiver(); - Handle<FixedArrayBase> elms; int len = -1; - if (receiver->IsJSArray()) { - Handle<JSArray> array = Handle<JSArray>::cast(receiver); - if (!IsJSArrayFastElementMovingAllowed(heap, *array)) { - return CallJsBuiltin(isolate, "ArraySlice", args); - } - - if (array->HasFastElements()) { - elms = handle(array->elements()); - } else { - return CallJsBuiltin(isolate, "ArraySlice", args); - } + int relative_start = 0; + int relative_end = 0; + { + DisallowHeapAllocation no_gc; + if (receiver->IsJSArray()) { + JSArray* array = JSArray::cast(*receiver); + if (!IsJSArrayFastElementMovingAllowed(heap, array)) { + AllowHeapAllocation allow_allocation; + return CallJsBuiltin(isolate, "ArraySlice", args); + } - len = Smi::cast(array->length())->value(); - } else { - // Array.slice(arguments, ...) is quite a common idiom (notably more - // than 50% of invocations in Web apps). Treat it in C++ as well. - Handle<Map> arguments_map(isolate->context()->native_context()-> - sloppy_arguments_boilerplate()->map()); - - bool is_arguments_object_with_fast_elements = - receiver->IsJSObject() && - Handle<JSObject>::cast(receiver)->map() == *arguments_map; - if (!is_arguments_object_with_fast_elements) { - return CallJsBuiltin(isolate, "ArraySlice", args); - } - Handle<JSObject> object = Handle<JSObject>::cast(receiver); + if (!array->HasFastElements()) { + AllowHeapAllocation allow_allocation; + return CallJsBuiltin(isolate, "ArraySlice", args); + } - if (object->HasFastElements()) { - elms = handle(object->elements()); + len = Smi::cast(array->length())->value(); } else { - return CallJsBuiltin(isolate, "ArraySlice", args); - } - Handle<Object> len_obj( - object->InObjectPropertyAt(Heap::kArgumentsLengthIndex), isolate); - if (!len_obj->IsSmi()) { - return CallJsBuiltin(isolate, "ArraySlice", args); - } - len = Handle<Smi>::cast(len_obj)->value(); - if (len > elms->length()) { - return CallJsBuiltin(isolate, "ArraySlice", args); - } - } - - Handle<JSObject> object = Handle<JSObject>::cast(receiver); + // Array.slice(arguments, ...) is quite a common idiom (notably more + // than 50% of invocations in Web apps). Treat it in C++ as well. 
+ Map* arguments_map = isolate->context()->native_context()-> + sloppy_arguments_boilerplate()->map(); + + bool is_arguments_object_with_fast_elements = + receiver->IsJSObject() && + JSObject::cast(*receiver)->map() == arguments_map; + if (!is_arguments_object_with_fast_elements) { + AllowHeapAllocation allow_allocation; + return CallJsBuiltin(isolate, "ArraySlice", args); + } + JSObject* object = JSObject::cast(*receiver); - ASSERT(len >= 0); - int n_arguments = args.length() - 1; + if (!object->HasFastElements()) { + AllowHeapAllocation allow_allocation; + return CallJsBuiltin(isolate, "ArraySlice", args); + } - // Note carefully choosen defaults---if argument is missing, - // it's undefined which gets converted to 0 for relative_start - // and to len for relative_end. - int relative_start = 0; - int relative_end = len; - if (n_arguments > 0) { - Handle<Object> arg1 = args.at<Object>(1); - if (arg1->IsSmi()) { - relative_start = Handle<Smi>::cast(arg1)->value(); - } else if (arg1->IsHeapNumber()) { - double start = Handle<HeapNumber>::cast(arg1)->value(); - if (start < kMinInt || start > kMaxInt) { + Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex); + if (!len_obj->IsSmi()) { + AllowHeapAllocation allow_allocation; + return CallJsBuiltin(isolate, "ArraySlice", args); + } + len = Smi::cast(len_obj)->value(); + if (len > object->elements()->length()) { + AllowHeapAllocation allow_allocation; return CallJsBuiltin(isolate, "ArraySlice", args); } - relative_start = std::isnan(start) ? 0 : static_cast<int>(start); - } else if (!arg1->IsUndefined()) { - return CallJsBuiltin(isolate, "ArraySlice", args); } - if (n_arguments > 1) { - Handle<Object> arg2 = args.at<Object>(2); - if (arg2->IsSmi()) { - relative_end = Handle<Smi>::cast(arg2)->value(); - } else if (arg2->IsHeapNumber()) { - double end = Handle<HeapNumber>::cast(arg2)->value(); - if (end < kMinInt || end > kMaxInt) { + + ASSERT(len >= 0); + int n_arguments = args.length() - 1; + + // Note carefully choosen defaults---if argument is missing, + // it's undefined which gets converted to 0 for relative_start + // and to len for relative_end. + relative_start = 0; + relative_end = len; + if (n_arguments > 0) { + Object* arg1 = args[1]; + if (arg1->IsSmi()) { + relative_start = Smi::cast(arg1)->value(); + } else if (arg1->IsHeapNumber()) { + double start = HeapNumber::cast(arg1)->value(); + if (start < kMinInt || start > kMaxInt) { + AllowHeapAllocation allow_allocation; return CallJsBuiltin(isolate, "ArraySlice", args); } - relative_end = std::isnan(end) ? 0 : static_cast<int>(end); - } else if (!arg2->IsUndefined()) { + relative_start = std::isnan(start) ? 0 : static_cast<int>(start); + } else if (!arg1->IsUndefined()) { + AllowHeapAllocation allow_allocation; return CallJsBuiltin(isolate, "ArraySlice", args); } + if (n_arguments > 1) { + Object* arg2 = args[2]; + if (arg2->IsSmi()) { + relative_end = Smi::cast(arg2)->value(); + } else if (arg2->IsHeapNumber()) { + double end = HeapNumber::cast(arg2)->value(); + if (end < kMinInt || end > kMaxInt) { + AllowHeapAllocation allow_allocation; + return CallJsBuiltin(isolate, "ArraySlice", args); + } + relative_end = std::isnan(end) ? 0 : static_cast<int>(end); + } else if (!arg2->IsUndefined()) { + AllowHeapAllocation allow_allocation; + return CallJsBuiltin(isolate, "ArraySlice", args); + } + } } } @@ -749,13 +729,16 @@ BUILTIN(ArraySlice) { // Calculate the length of result array. 
int result_len = Max(final - k, 0); + Handle<JSObject> object = Handle<JSObject>::cast(receiver); + Handle<FixedArrayBase> elms(object->elements(), isolate); + ElementsKind kind = object->GetElementsKind(); if (IsHoleyElementsKind(kind)) { + DisallowHeapAllocation no_gc; bool packed = true; ElementsAccessor* accessor = ElementsAccessor::ForKind(kind); for (int i = k; i < final; i++) { - if (!ElementsAccessorHasElementWrapper( - accessor, object, object, i, elms)) { + if (!accessor->HasElement(object, object, i, elms)) { packed = false; break; } @@ -763,6 +746,7 @@ BUILTIN(ArraySlice) { if (packed) { kind = GetPackedElementsKind(kind); } else if (!receiver->IsJSArray()) { + AllowHeapAllocation allow_allocation; return CallJsBuiltin(isolate, "ArraySlice", args); } } @@ -774,8 +758,8 @@ BUILTIN(ArraySlice) { if (result_len == 0) return *result_array; ElementsAccessor* accessor = object->GetElementsAccessor(); - accessor->CopyElements(Handle<JSObject>::null(), k, kind, - handle(result_array->elements()), 0, result_len, elms); + accessor->CopyElements( + elms, k, kind, handle(result_array->elements(), isolate), 0, result_len); return *result_array; } @@ -784,9 +768,10 @@ BUILTIN(ArraySplice) { HandleScope scope(isolate); Heap* heap = isolate->heap(); Handle<Object> receiver = args.receiver(); - Handle<FixedArrayBase> elms_obj = + MaybeHandle<FixedArrayBase> maybe_elms_obj = EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 3); - if (elms_obj.is_null() || + Handle<FixedArrayBase> elms_obj; + if (!maybe_elms_obj.ToHandle(&elms_obj) || !IsJSArrayFastElementMovingAllowed(heap, *Handle<JSArray>::cast(receiver))) { return CallJsBuiltin(isolate, "ArraySplice", args); @@ -800,16 +785,19 @@ BUILTIN(ArraySplice) { int relative_start = 0; if (n_arguments > 0) { - Handle<Object> arg1 = args.at<Object>(1); + DisallowHeapAllocation no_gc; + Object* arg1 = args[1]; if (arg1->IsSmi()) { - relative_start = Handle<Smi>::cast(arg1)->value(); + relative_start = Smi::cast(arg1)->value(); } else if (arg1->IsHeapNumber()) { - double start = Handle<HeapNumber>::cast(arg1)->value(); + double start = HeapNumber::cast(arg1)->value(); if (start < kMinInt || start > kMaxInt) { + AllowHeapAllocation allow_allocation; return CallJsBuiltin(isolate, "ArraySplice", args); } relative_start = std::isnan(start) ? 0 : static_cast<int>(start); } else if (!arg1->IsUndefined()) { + AllowHeapAllocation allow_allocation; return CallJsBuiltin(isolate, "ArraySplice", args); } } @@ -828,10 +816,12 @@ BUILTIN(ArraySplice) { } else { int value = 0; // ToInteger(undefined) == 0 if (n_arguments > 1) { + DisallowHeapAllocation no_gc; Object* arg2 = args[2]; if (arg2->IsSmi()) { value = Smi::cast(arg2)->value(); } else { + AllowHeapAllocation allow_allocation; return CallJsBuiltin(isolate, "ArraySplice", args); } } @@ -865,8 +855,8 @@ BUILTIN(ArraySplice) { DisallowHeapAllocation no_gc; ElementsAccessor* accessor = array->GetElementsAccessor(); accessor->CopyElements( - Handle<JSObject>::null(), actual_start, elements_kind, - handle(result_array->elements()), 0, actual_delete_count, elms_obj); + elms_obj, actual_start, elements_kind, + handle(result_array->elements(), isolate), 0, actual_delete_count); } bool elms_changed = false; @@ -890,7 +880,7 @@ BUILTIN(ArraySplice) { if (heap->CanMoveObjectStart(*elms_obj)) { // On the fast path we move the start of the object in memory. 
- elms_obj = handle(LeftTrimFixedArray(heap, *elms_obj, delta)); + elms_obj = handle(LeftTrimFixedArray(heap, *elms_obj, delta), isolate); } else { // This is the slow path. We are going to move the elements to the left // by copying them. For trimmed values we store the hole. @@ -898,12 +888,12 @@ BUILTIN(ArraySplice) { Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj); MoveDoubleElements(*elms, 0, *elms, delta, len - delta); - FillWithHoles(*elms, len - delta, len); + elms->FillWithHoles(len - delta, len); } else { Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj); DisallowHeapAllocation no_gc; heap->MoveElements(*elms, 0, delta, len - delta); - FillWithHoles(heap, *elms, len - delta, len); + elms->FillWithHoles(len - delta, len); } } elms_changed = true; @@ -914,14 +904,14 @@ BUILTIN(ArraySplice) { MoveDoubleElements(*elms, actual_start + item_count, *elms, actual_start + actual_delete_count, (len - actual_delete_count - actual_start)); - FillWithHoles(*elms, new_length, len); + elms->FillWithHoles(new_length, len); } else { Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj); DisallowHeapAllocation no_gc; heap->MoveElements(*elms, actual_start + item_count, actual_start + actual_delete_count, (len - actual_delete_count - actual_start)); - FillWithHoles(heap, *elms, new_length, len); + elms->FillWithHoles(new_length, len); } } } else if (item_count > actual_delete_count) { @@ -944,12 +934,12 @@ BUILTIN(ArraySplice) { if (actual_start > 0) { // Copy the part before actual_start as is. accessor->CopyElements( - Handle<JSObject>::null(), 0, kind, new_elms, 0, actual_start, elms); + elms, 0, kind, new_elms, 0, actual_start); } accessor->CopyElements( - Handle<JSObject>::null(), actual_start + actual_delete_count, kind, + elms, actual_start + actual_delete_count, kind, new_elms, actual_start + item_count, - ElementsAccessor::kCopyToEndAndInitializeToHole, elms); + ElementsAccessor::kCopyToEndAndInitializeToHole); elms_obj = new_elms; elms_changed = true; @@ -992,51 +982,57 @@ BUILTIN(ArraySplice) { BUILTIN(ArrayConcat) { HandleScope scope(isolate); - Heap* heap = isolate->heap(); - Handle<Context> native_context(isolate->context()->native_context()); - Handle<JSObject> array_proto( - JSObject::cast(native_context->array_function()->prototype())); - if (!ArrayPrototypeHasNoElements(heap, *native_context, *array_proto)) { - return CallJsBuiltin(isolate, "ArrayConcat", args); - } - // Iterate through all the arguments performing checks - // and calculating total length. int n_arguments = args.length(); int result_len = 0; ElementsKind elements_kind = GetInitialFastElementsKind(); bool has_double = false; - bool is_holey = false; - for (int i = 0; i < n_arguments; i++) { - Handle<Object> arg = args.at<Object>(i); - if (!arg->IsJSArray() || - !Handle<JSArray>::cast(arg)->HasFastElements() || - Handle<JSArray>::cast(arg)->GetPrototype() != *array_proto) { + { + DisallowHeapAllocation no_gc; + Heap* heap = isolate->heap(); + Context* native_context = isolate->context()->native_context(); + JSObject* array_proto = + JSObject::cast(native_context->array_function()->prototype()); + if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) { + AllowHeapAllocation allow_allocation; return CallJsBuiltin(isolate, "ArrayConcat", args); } - int len = Smi::cast(Handle<JSArray>::cast(arg)->length())->value(); - - // We shouldn't overflow when adding another len. 
- const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2); - STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt); - USE(kHalfOfMaxInt); - result_len += len; - ASSERT(result_len >= 0); - if (result_len > FixedDoubleArray::kMaxLength) { - return CallJsBuiltin(isolate, "ArrayConcat", args); - } + // Iterate through all the arguments performing checks + // and calculating total length. + bool is_holey = false; + for (int i = 0; i < n_arguments; i++) { + Object* arg = args[i]; + if (!arg->IsJSArray() || + !JSArray::cast(arg)->HasFastElements() || + JSArray::cast(arg)->GetPrototype() != array_proto) { + AllowHeapAllocation allow_allocation; + return CallJsBuiltin(isolate, "ArrayConcat", args); + } + int len = Smi::cast(JSArray::cast(arg)->length())->value(); + + // We shouldn't overflow when adding another len. + const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2); + STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt); + USE(kHalfOfMaxInt); + result_len += len; + ASSERT(result_len >= 0); + + if (result_len > FixedDoubleArray::kMaxLength) { + AllowHeapAllocation allow_allocation; + return CallJsBuiltin(isolate, "ArrayConcat", args); + } - ElementsKind arg_kind = Handle<JSArray>::cast(arg)->map()->elements_kind(); - has_double = has_double || IsFastDoubleElementsKind(arg_kind); - is_holey = is_holey || IsFastHoleyElementsKind(arg_kind); - if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) { - elements_kind = arg_kind; + ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind(); + has_double = has_double || IsFastDoubleElementsKind(arg_kind); + is_holey = is_holey || IsFastHoleyElementsKind(arg_kind); + if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) { + elements_kind = arg_kind; + } } + if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind); } - if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind); - // If a double array is concatted into a fast elements array, the fast // elements array needs to be initialized to contain proper holes, since // boxing doubles may cause incremental marking. @@ -1051,10 +1047,12 @@ BUILTIN(ArrayConcat) { if (result_len == 0) return *result_array; int j = 0; - Handle<FixedArrayBase> storage(result_array->elements()); + Handle<FixedArrayBase> storage(result_array->elements(), isolate); ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind); for (int i = 0; i < n_arguments; i++) { - Handle<JSArray> array = args.at<JSArray>(i); + // TODO(ishell): It is crucial to keep |array| as a raw pointer to avoid + // performance degradation. Revisit this later. 
+ JSArray* array = JSArray::cast(args[i]); int len = Smi::cast(array->length())->value(); ElementsKind from_kind = array->GetElementsKind(); if (len > 0) { @@ -1143,7 +1141,7 @@ static inline Object* TypeCheck(Heap* heap, template <bool is_construct> -MUST_USE_RESULT static MaybeObject* HandleApiCallHelper( +MUST_USE_RESULT static Object* HandleApiCallHelper( BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) { ASSERT(is_construct == CalledAsConstructor(isolate)); Heap* heap = isolate->heap(); @@ -1152,15 +1150,13 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallHelper( Handle<JSFunction> function = args.called_function(); ASSERT(function->shared()->IsApiFunction()); - FunctionTemplateInfo* fun_data = function->shared()->get_api_func_data(); + Handle<FunctionTemplateInfo> fun_data( + function->shared()->get_api_func_data(), isolate); if (is_construct) { - Handle<FunctionTemplateInfo> desc(fun_data, isolate); - bool pending_exception = false; - isolate->factory()->ConfigureInstance( - desc, Handle<JSObject>::cast(args.receiver()), &pending_exception); - ASSERT(isolate->has_pending_exception() == pending_exception); - if (pending_exception) return Failure::Exception(); - fun_data = *desc; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, fun_data, + isolate->factory()->ConfigureInstance( + fun_data, Handle<JSObject>::cast(args.receiver()))); } SharedFunctionInfo* shared = function->shared(); @@ -1172,7 +1168,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallHelper( } } - Object* raw_holder = TypeCheck(heap, args.length(), &args[0], fun_data); + Object* raw_holder = TypeCheck(heap, args.length(), &args[0], *fun_data); if (raw_holder->IsNull()) { // This function cannot be called with the given receiver. Abort! @@ -1210,7 +1206,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallHelper( result->VerifyApiCallResultType(); } - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); if (!is_construct || result->IsJSObject()) return result; } @@ -1231,7 +1227,7 @@ BUILTIN(HandleApiCallConstruct) { // Helper function to handle calls to non-function objects created through the // API. The object can be called as either a constructor (using new) or just as // a function (without new). -MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor( +MUST_USE_RESULT static Object* HandleApiCallAsFunctionOrConstructor( Isolate* isolate, bool is_construct_call, BuiltinArguments<NO_EXTRA_ARGUMENTS> args) { @@ -1280,7 +1276,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor( } } // Check for exceptions and return result. 
- RETURN_IF_SCHEDULED_EXCEPTION(isolate); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); return result; } @@ -1424,7 +1420,11 @@ static void Generate_KeyedStoreIC_SloppyArguments(MacroAssembler* masm) { } -#ifdef ENABLE_DEBUGGER_SUPPORT +static void Generate_CallICStub_DebugBreak(MacroAssembler* masm) { + Debug::GenerateCallICStubDebugBreak(masm); +} + + static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) { Debug::GenerateLoadICDebugBreak(masm); } @@ -1460,12 +1460,6 @@ static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) { } -static void Generate_CallFunctionStub_Recording_DebugBreak( - MacroAssembler* masm) { - Debug::GenerateCallFunctionStubRecordDebugBreak(masm); -} - - static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) { Debug::GenerateCallConstructStubDebugBreak(masm); } @@ -1490,7 +1484,6 @@ static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) { static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) { Debug::GenerateFrameDropperLiveEdit(masm); } -#endif Builtins::Builtins() : initialized_(false) { @@ -1602,7 +1595,6 @@ void Builtins::InitBuiltinFunctionTable() { void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) { ASSERT(!initialized_); - Heap* heap = isolate->heap(); // Create a scope for the handles in the builtins. HandleScope scope(isolate); @@ -1633,32 +1625,18 @@ void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) { CodeDesc desc; masm.GetCode(&desc); Code::Flags flags = functions[i].flags; - Object* code = NULL; - { - // During startup it's OK to always allocate and defer GC to later. - // This simplifies things because we don't need to retry. - AlwaysAllocateScope __scope__(isolate); - { MaybeObject* maybe_code = - heap->CreateCode(desc, flags, masm.CodeObject()); - if (!maybe_code->ToObject(&code)) { - v8::internal::V8::FatalProcessOutOfMemory("CreateCode"); - } - } - } + Handle<Code> code = + isolate->factory()->NewCode(desc, flags, masm.CodeObject()); // Log the event and add the code to the builtins array. PROFILE(isolate, - CodeCreateEvent(Logger::BUILTIN_TAG, - Code::cast(code), - functions[i].s_name)); - GDBJIT(AddCode(GDBJITInterface::BUILTIN, - functions[i].s_name, - Code::cast(code))); - builtins_[i] = code; + CodeCreateEvent(Logger::BUILTIN_TAG, *code, functions[i].s_name)); + GDBJIT(AddCode(GDBJITInterface::BUILTIN, functions[i].s_name, *code)); + builtins_[i] = *code; #ifdef ENABLE_DISASSEMBLER if (FLAG_print_builtin_code) { CodeTracer::Scope trace_scope(isolate->GetCodeTracer()); PrintF(trace_scope.file(), "Builtin: %s\n", functions[i].s_name); - Code::cast(code)->Disassemble(functions[i].s_name, trace_scope.file()); + code->Disassemble(functions[i].s_name, trace_scope.file()); PrintF(trace_scope.file(), "\n"); } #endif diff --git a/deps/v8/src/builtins.h b/deps/v8/src/builtins.h index 88cfd53f4..e6b60c732 100644 --- a/deps/v8/src/builtins.h +++ b/deps/v8/src/builtins.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_BUILTINS_H_ #define V8_BUILTINS_H_ @@ -197,19 +174,18 @@ enum BuiltinExtraArguments { V(LoadIC_Normal, LOAD_IC) \ V(StoreIC_Normal, STORE_IC) -#ifdef ENABLE_DEBUGGER_SUPPORT // Define list of builtins used by the debugger implemented in assembly. #define BUILTIN_LIST_DEBUG_A(V) \ V(Return_DebugBreak, BUILTIN, DEBUG_STUB, \ DEBUG_BREAK) \ V(CallFunctionStub_DebugBreak, BUILTIN, DEBUG_STUB, \ DEBUG_BREAK) \ - V(CallFunctionStub_Recording_DebugBreak, BUILTIN, DEBUG_STUB, \ - DEBUG_BREAK) \ V(CallConstructStub_DebugBreak, BUILTIN, DEBUG_STUB, \ DEBUG_BREAK) \ V(CallConstructStub_Recording_DebugBreak, BUILTIN, DEBUG_STUB, \ DEBUG_BREAK) \ + V(CallICStub_DebugBreak, CALL_IC, DEBUG_STUB, \ + DEBUG_BREAK) \ V(LoadIC_DebugBreak, LOAD_IC, DEBUG_STUB, \ DEBUG_BREAK) \ V(KeyedLoadIC_DebugBreak, KEYED_LOAD_IC, DEBUG_STUB, \ @@ -226,9 +202,6 @@ enum BuiltinExtraArguments { DEBUG_BREAK) \ V(FrameDropper_LiveEdit, BUILTIN, DEBUG_STUB, \ DEBUG_BREAK) -#else -#define BUILTIN_LIST_DEBUG_A(V) -#endif // Define list of builtins implemented in JavaScript. #define BUILTINS_LIST_JS(V) \ @@ -260,7 +233,7 @@ enum BuiltinExtraArguments { V(STRING_ADD_LEFT, 1) \ V(STRING_ADD_RIGHT, 1) \ V(APPLY_PREPARE, 1) \ - V(APPLY_OVERFLOW, 1) + V(STACK_OVERFLOW, 1) class BuiltinFunctionTable; class ObjectVisitor; diff --git a/deps/v8/src/bytecodes-irregexp.h b/deps/v8/src/bytecodes-irregexp.h index c7cc66e52..04b9740ac 100644 --- a/deps/v8/src/bytecodes-irregexp.h +++ b/deps/v8/src/bytecodes-irregexp.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_BYTECODES_IRREGEXP_H_ diff --git a/deps/v8/src/cached-powers.cc b/deps/v8/src/cached-powers.cc index faa26cdba..8a0096848 100644 --- a/deps/v8/src/cached-powers.cc +++ b/deps/v8/src/cached-powers.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdarg.h> #include <limits.h> diff --git a/deps/v8/src/cached-powers.h b/deps/v8/src/cached-powers.h index 88df22260..b58924fb3 100644 --- a/deps/v8/src/cached-powers.h +++ b/deps/v8/src/cached-powers.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_CACHED_POWERS_H_ #define V8_CACHED_POWERS_H_ diff --git a/deps/v8/src/char-predicates-inl.h b/deps/v8/src/char-predicates-inl.h index dee9ccd38..16a89f4c3 100644 --- a/deps/v8/src/char-predicates-inl.h +++ b/deps/v8/src/char-predicates-inl.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_CHAR_PREDICATES_INL_H_ #define V8_CHAR_PREDICATES_INL_H_ diff --git a/deps/v8/src/char-predicates.h b/deps/v8/src/char-predicates.h index f52feda6c..84247e43c 100644 --- a/deps/v8/src/char-predicates.h +++ b/deps/v8/src/char-predicates.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_CHAR_PREDICATES_H_ #define V8_CHAR_PREDICATES_H_ diff --git a/deps/v8/src/checks.cc b/deps/v8/src/checks.cc index 3a2de28a2..4667facad 100644 --- a/deps/v8/src/checks.cc +++ b/deps/v8/src/checks.cc @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "checks.h" @@ -98,8 +75,6 @@ void DumpBacktrace() { // Contains protection against recursive calls (faults while handling faults). extern "C" void V8_Fatal(const char* file, int line, const char* format, ...) { - i::AllowHandleDereference allow_deref; - i::AllowDeferredHandleDereference allow_deferred_deref; fflush(stdout); fflush(stderr); i::OS::PrintError("\n\n#\n# Fatal error in %s, line %d\n# ", file, line); diff --git a/deps/v8/src/checks.h b/deps/v8/src/checks.h index e53475a0a..c2ccc8193 100644 --- a/deps/v8/src/checks.h +++ b/deps/v8/src/checks.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_CHECKS_H_ #define V8_CHECKS_H_ diff --git a/deps/v8/src/circular-queue-inl.h b/deps/v8/src/circular-queue-inl.h index dfb703157..14910a17e 100644 --- a/deps/v8/src/circular-queue-inl.h +++ b/deps/v8/src/circular-queue-inl.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_CIRCULAR_QUEUE_INL_H_ #define V8_CIRCULAR_QUEUE_INL_H_ diff --git a/deps/v8/src/circular-queue.h b/deps/v8/src/circular-queue.h index 71ef38322..81e80d2aa 100644 --- a/deps/v8/src/circular-queue.h +++ b/deps/v8/src/circular-queue.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_CIRCULAR_QUEUE_H_ #define V8_CIRCULAR_QUEUE_H_ diff --git a/deps/v8/src/code-stubs-hydrogen.cc b/deps/v8/src/code-stubs-hydrogen.cc index 040c26013..68c864176 100644 --- a/deps/v8/src/code-stubs-hydrogen.cc +++ b/deps/v8/src/code-stubs-hydrogen.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -60,7 +37,7 @@ class CodeStubGraphBuilderBase : public HGraphBuilder { arguments_length_(NULL), info_(stub, isolate), context_(NULL) { - descriptor_ = stub->GetInterfaceDescriptor(isolate); + descriptor_ = stub->GetInterfaceDescriptor(); parameters_.Reset(new HParameter*[descriptor_->register_param_count_]); } virtual bool BuildGraph(); @@ -238,15 +215,15 @@ class CodeStubGraphBuilder: public CodeStubGraphBuilderBase { }; -Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(Isolate* isolate) { - Factory* factory = isolate->factory(); +Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode() { + Factory* factory = isolate()->factory(); // Generate the new code. - MacroAssembler masm(isolate, NULL, 256); + MacroAssembler masm(isolate(), NULL, 256); { // Update the static counter each time a new code stub is generated. - isolate->counters()->code_stubs()->Increment(); + isolate()->counters()->code_stubs()->Increment(); // Generate the code for the stub. 
masm.set_generating_stub(true); @@ -271,13 +248,14 @@ Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(Isolate* isolate) { template <class Stub> -static Handle<Code> DoGenerateCode(Isolate* isolate, Stub* stub) { +static Handle<Code> DoGenerateCode(Stub* stub) { + Isolate* isolate = stub->isolate(); CodeStub::Major major_key = static_cast<HydrogenCodeStub*>(stub)->MajorKey(); CodeStubInterfaceDescriptor* descriptor = isolate->code_stub_interface_descriptor(major_key); if (descriptor->register_param_count_ < 0) { - stub->InitializeInterfaceDescriptor(isolate, descriptor); + stub->InitializeInterfaceDescriptor(descriptor); } // If we are uninitialized we can use a light-weight stub to enter @@ -285,7 +263,7 @@ static Handle<Code> DoGenerateCode(Isolate* isolate, Stub* stub) { // stub-failure deopt mechanism. if (stub->IsUninitialized() && descriptor->has_miss_handler()) { ASSERT(!descriptor->stack_parameter_count_.is_valid()); - return stub->GenerateLightweightMissCode(isolate); + return stub->GenerateLightweightMissCode(); } ElapsedTimer timer; if (FLAG_profile_hydrogen_code_stub_compilation) { @@ -329,8 +307,8 @@ HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() { } -Handle<Code> ToNumberStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> ToNumberStub::GenerateCode() { + return DoGenerateCode(this); } @@ -342,8 +320,8 @@ HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() { } -Handle<Code> NumberToStringStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> NumberToStringStub::GenerateCode() { + return DoGenerateCode(this); } @@ -416,8 +394,8 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() { } -Handle<Code> FastCloneShallowArrayStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> FastCloneShallowArrayStub::GenerateCode() { + return DoGenerateCode(this); } @@ -483,8 +461,8 @@ HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() { } -Handle<Code> FastCloneShallowObjectStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> FastCloneShallowObjectStub::GenerateCode() { + return DoGenerateCode(this); } @@ -553,8 +531,8 @@ HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() { } -Handle<Code> CreateAllocationSiteStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> CreateAllocationSiteStub::GenerateCode() { + return DoGenerateCode(this); } @@ -568,8 +546,8 @@ HValue* CodeStubGraphBuilder<KeyedLoadFastElementStub>::BuildCodeStub() { } -Handle<Code> KeyedLoadFastElementStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> KeyedLoadFastElementStub::GenerateCode() { + return DoGenerateCode(this); } @@ -602,8 +580,8 @@ HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() { } -Handle<Code> LoadFieldStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> LoadFieldStub::GenerateCode() { + return DoGenerateCode(this); } @@ -616,8 +594,8 @@ HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() { } -Handle<Code> StringLengthStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> StringLengthStub::GenerateCode() { + return DoGenerateCode(this); } @@ -632,8 +610,8 @@ HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() { } -Handle<Code> 
KeyedStoreFastElementStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> KeyedStoreFastElementStub::GenerateCode() { + return DoGenerateCode(this); } @@ -651,8 +629,8 @@ HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() { } -Handle<Code> TransitionElementsKindStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> TransitionElementsKindStub::GenerateCode() { + return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildArrayConstructor( @@ -768,8 +746,8 @@ HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() { } -Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() { + return DoGenerateCode(this); } @@ -782,9 +760,8 @@ HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>:: } -Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode( - Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() { + return DoGenerateCode(this); } @@ -796,8 +773,8 @@ HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() { } -Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() { + return DoGenerateCode(this); } @@ -809,9 +786,8 @@ HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>:: } -Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode( - Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() { + return DoGenerateCode(this); } @@ -823,9 +799,8 @@ HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>:: } -Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode( - Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() { + return DoGenerateCode(this); } @@ -837,9 +812,8 @@ HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>:: } -Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode( - Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() { + return DoGenerateCode(this); } @@ -864,8 +838,8 @@ HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() { } -Handle<Code> CompareNilICStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> CompareNilICStub::GenerateCode() { + return DoGenerateCode(this); } @@ -963,7 +937,7 @@ HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() { if (state.CanReuseDoubleBox()) { HValue* operand = (state.mode() == OVERWRITE_LEFT) ? 
left : right; IfBuilder if_heap_number(this); - if_heap_number.IfNot<HIsSmiAndBranch>(operand); + if_heap_number.If<HHasInstanceTypeAndBranch>(operand, HEAP_NUMBER_TYPE); if_heap_number.Then(); Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result); Push(operand); @@ -977,8 +951,8 @@ HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() { } -Handle<Code> BinaryOpICStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> BinaryOpICStub::GenerateCode() { + return DoGenerateCode(this); } @@ -1002,8 +976,8 @@ HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() { } -Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() { + return DoGenerateCode(this); } @@ -1028,8 +1002,8 @@ HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() { } -Handle<Code> StringAddStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> StringAddStub::GenerateCode() { + return DoGenerateCode(this); } @@ -1047,8 +1021,8 @@ HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() { } -Handle<Code> ToBooleanStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> ToBooleanStub::GenerateCode() { + return DoGenerateCode(this); } @@ -1068,7 +1042,7 @@ HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() { Handle<Map> placeholder_map = isolate()->factory()->meta_map(); HValue* global = Add<HConstant>( StoreGlobalStub::global_placeholder(isolate())); - Add<HCheckMaps>(global, placeholder_map, top_info()); + Add<HCheckMaps>(global, placeholder_map); } HValue* cell = Add<HConstant>(placeholder_cell); @@ -1100,8 +1074,8 @@ HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() { } -Handle<Code> StoreGlobalStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> StoreGlobalStub::GenerateCode() { + return DoGenerateCode(this); } @@ -1134,8 +1108,8 @@ HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() { } -Handle<Code> ElementsTransitionAndStoreStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() { + return DoGenerateCode(this); } @@ -1344,8 +1318,8 @@ HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() { } -Handle<Code> FastNewClosureStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> FastNewClosureStub::GenerateCode() { + return DoGenerateCode(this); } @@ -1399,8 +1373,8 @@ HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() { } -Handle<Code> FastNewContextStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> FastNewContextStub::GenerateCode() { + return DoGenerateCode(this); } @@ -1415,8 +1389,8 @@ HValue* CodeStubGraphBuilder<KeyedLoadDictionaryElementStub>::BuildCodeStub() { } -Handle<Code> KeyedLoadDictionaryElementStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> KeyedLoadDictionaryElementStub::GenerateCode() { + return DoGenerateCode(this); } @@ -1431,8 +1405,8 @@ HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() { } -Handle<Code> RegExpConstructResultStub::GenerateCode(Isolate* isolate) { - return DoGenerateCode(isolate, this); +Handle<Code> 
RegExpConstructResultStub::GenerateCode() { + return DoGenerateCode(this); } diff --git a/deps/v8/src/code-stubs.cc b/deps/v8/src/code-stubs.cc index 06203629a..24f60ed41 100644 --- a/deps/v8/src/code-stubs.cc +++ b/deps/v8/src/code-stubs.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
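Every hunk in the code-stubs-hydrogen.cc portion above is the same mechanical edit: the Isolate* that each Hydrogen stub's GenerateCode() used to receive as an argument is now carried by the stub object itself, so each override collapses to a single DoGenerateCode(this) call. Distilled from the hunks above (V8-internal code, reproduced only to make the pattern explicit, not as a standalone example):

  // Before the upgrade: the isolate is threaded through every override.
  Handle<Code> ToBooleanStub::GenerateCode(Isolate* isolate) {
    return DoGenerateCode(isolate, this);
  }

  // After the upgrade: the stub was constructed with an Isolate*, so the
  // override takes no parameter and DoGenerateCode() recovers the isolate
  // from the stub it is handed.
  Handle<Code> ToBooleanStub::GenerateCode() {
    return DoGenerateCode(this);
  }

The only hunks in this region that go beyond that rename are the BinaryOpICStub builder swapping its is-not-Smi test for an explicit HEAP_NUMBER_TYPE instance-type check, and the StoreGlobalStub builder's HCheckMaps call dropping its top_info() argument.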
#include "v8.h" @@ -51,8 +28,8 @@ CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor() has_miss_handler_(false) { } -bool CodeStub::FindCodeInCache(Code** code_out, Isolate* isolate) { - UnseededNumberDictionary* stubs = isolate->heap()->code_stubs(); +bool CodeStub::FindCodeInCache(Code** code_out) { + UnseededNumberDictionary* stubs = isolate()->heap()->code_stubs(); int index = stubs->FindEntry(GetKey()); if (index != UnseededNumberDictionary::kNotFound) { *code_out = Code::cast(stubs->ValueAt(index)); @@ -72,11 +49,12 @@ SmartArrayPointer<const char> CodeStub::GetName() { } -void CodeStub::RecordCodeGeneration(Code* code, Isolate* isolate) { +void CodeStub::RecordCodeGeneration(Handle<Code> code) { + IC::RegisterWeakMapDependency(code); SmartArrayPointer<const char> name = GetName(); - PROFILE(isolate, CodeCreateEvent(Logger::STUB_TAG, code, name.get())); - GDBJIT(AddCode(GDBJITInterface::STUB, name.get(), code)); - Counters* counters = isolate->counters(); + PROFILE(isolate(), CodeCreateEvent(Logger::STUB_TAG, *code, name.get())); + GDBJIT(AddCode(GDBJITInterface::STUB, name.get(), *code)); + Counters* counters = isolate()->counters(); counters->total_stubs_code_size()->Increment(code->instruction_size()); } @@ -86,25 +64,24 @@ Code::Kind CodeStub::GetCodeKind() const { } -Handle<Code> CodeStub::GetCodeCopy(Isolate* isolate, - const Code::FindAndReplacePattern& pattern) { - Handle<Code> ic = GetCode(isolate); - ic = isolate->factory()->CopyCode(ic); +Handle<Code> CodeStub::GetCodeCopy(const Code::FindAndReplacePattern& pattern) { + Handle<Code> ic = GetCode(); + ic = isolate()->factory()->CopyCode(ic); ic->FindAndReplace(pattern); - RecordCodeGeneration(*ic, isolate); + RecordCodeGeneration(ic); return ic; } -Handle<Code> PlatformCodeStub::GenerateCode(Isolate* isolate) { - Factory* factory = isolate->factory(); +Handle<Code> PlatformCodeStub::GenerateCode() { + Factory* factory = isolate()->factory(); // Generate the new code. - MacroAssembler masm(isolate, NULL, 256); + MacroAssembler masm(isolate(), NULL, 256); { // Update the static counter each time a new code stub is generated. - isolate->counters()->code_stubs()->Increment(); + isolate()->counters()->code_stubs()->Increment(); // Generate the code for the stub. masm.set_generating_stub(true); @@ -128,37 +105,36 @@ Handle<Code> PlatformCodeStub::GenerateCode(Isolate* isolate) { } -void CodeStub::VerifyPlatformFeatures(Isolate* isolate) { +void CodeStub::VerifyPlatformFeatures() { ASSERT(CpuFeatures::VerifyCrossCompiling()); } -Handle<Code> CodeStub::GetCode(Isolate* isolate) { - Factory* factory = isolate->factory(); - Heap* heap = isolate->heap(); +Handle<Code> CodeStub::GetCode() { + Heap* heap = isolate()->heap(); Code* code; if (UseSpecialCache() - ? FindCodeInSpecialCache(&code, isolate) - : FindCodeInCache(&code, isolate)) { + ? 
FindCodeInSpecialCache(&code) + : FindCodeInCache(&code)) { ASSERT(GetCodeKind() == code->kind()); return Handle<Code>(code); } #ifdef DEBUG - VerifyPlatformFeatures(isolate); + VerifyPlatformFeatures(); #endif { - HandleScope scope(isolate); + HandleScope scope(isolate()); - Handle<Code> new_object = GenerateCode(isolate); + Handle<Code> new_object = GenerateCode(); new_object->set_major_key(MajorKey()); FinishCode(new_object); - RecordCodeGeneration(*new_object, isolate); + RecordCodeGeneration(new_object); #ifdef ENABLE_DISASSEMBLER if (FLAG_print_code_stubs) { - CodeTracer::Scope trace_scope(isolate->GetCodeTracer()); + CodeTracer::Scope trace_scope(isolate()->GetCodeTracer()); new_object->Disassemble(GetName().get(), trace_scope.file()); PrintF(trace_scope.file(), "\n"); } @@ -169,7 +145,7 @@ Handle<Code> CodeStub::GetCode(Isolate* isolate) { } else { // Update the dictionary and the root in Heap. Handle<UnseededNumberDictionary> dict = - factory->DictionaryAtNumberPut( + UnseededNumberDictionary::AtNumberPut( Handle<UnseededNumberDictionary>(heap->code_stubs()), GetKey(), new_object); @@ -182,7 +158,7 @@ Handle<Code> CodeStub::GetCode(Isolate* isolate) { ASSERT(!NeedsImmovableCode() || heap->lo_space()->Contains(code) || heap->code_space()->FirstPage()->Contains(code->address())); - return Handle<Code>(code, isolate); + return Handle<Code>(code, isolate()); } @@ -218,9 +194,10 @@ void BinaryOpICStub::GenerateAheadOfTime(Isolate* isolate) { // Generate the uninitialized versions of the stub. for (int op = Token::BIT_OR; op <= Token::MOD; ++op) { for (int mode = NO_OVERWRITE; mode <= OVERWRITE_RIGHT; ++mode) { - BinaryOpICStub stub(static_cast<Token::Value>(op), + BinaryOpICStub stub(isolate, + static_cast<Token::Value>(op), static_cast<OverwriteMode>(mode)); - stub.GetCode(isolate); + stub.GetCode(); } } @@ -237,8 +214,8 @@ void BinaryOpICStub::PrintState(StringStream* stream) { // static void BinaryOpICStub::GenerateAheadOfTime(Isolate* isolate, const BinaryOpIC::State& state) { - BinaryOpICStub stub(state); - stub.GetCode(isolate); + BinaryOpICStub stub(isolate, state); + stub.GetCode(); } @@ -258,8 +235,8 @@ void BinaryOpICWithAllocationSiteStub::PrintState(StringStream* stream) { void BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime( Isolate* isolate, const BinaryOpIC::State& state) { if (state.CouldCreateAllocationMementos()) { - BinaryOpICWithAllocationSiteStub stub(state); - stub.GetCode(isolate); + BinaryOpICWithAllocationSiteStub stub(isolate, state); + stub.GetCode(); } } @@ -312,8 +289,8 @@ void ICCompareStub::AddToSpecialCache(Handle<Code> new_object) { } -bool ICCompareStub::FindCodeInSpecialCache(Code** code_out, Isolate* isolate) { - Factory* factory = isolate->factory(); +bool ICCompareStub::FindCodeInSpecialCache(Code** code_out) { + Factory* factory = isolate()->factory(); Code::Flags flags = Code::ComputeFlags( GetCodeKind(), UNINITIALIZED); @@ -324,7 +301,7 @@ bool ICCompareStub::FindCodeInSpecialCache(Code** code_out, Isolate* isolate) { *factory->strict_compare_ic_string() : *factory->compare_ic_string(), flags), - isolate); + isolate()); if (probe->IsCode()) { *code_out = Code::cast(*probe); #ifdef DEBUG @@ -501,6 +478,11 @@ Type* CompareNilICStub::GetInputType(Zone* zone, Handle<Map> map) { } +void CallICStub::PrintState(StringStream* stream) { + state_.Print(stream); +} + + void InstanceofStub::PrintName(StringStream* stream) { const char* args = ""; if (HasArgsInRegisters()) { @@ -539,8 +521,8 @@ void KeyedLoadDictionaryElementPlatformStub::Generate( 
void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) { - CreateAllocationSiteStub stub; - stub.GetCode(isolate); + CreateAllocationSiteStub stub(isolate); + stub.GetCode(); } @@ -583,7 +565,6 @@ void ArgumentsAccessStub::PrintName(StringStream* stream) { void CallFunctionStub::PrintName(StringStream* stream) { stream->Add("CallFunctionStub_Args%d", argc_); - if (RecordCallTarget()) stream->Add("_Recording"); } @@ -695,10 +676,10 @@ bool ToBooleanStub::Types::CanBeUndetectable() const { void StubFailureTrampolineStub::GenerateAheadOfTime(Isolate* isolate) { - StubFailureTrampolineStub stub1(NOT_JS_FUNCTION_STUB_MODE); - StubFailureTrampolineStub stub2(JS_FUNCTION_STUB_MODE); - stub1.GetCode(isolate); - stub2.GetCode(isolate); + StubFailureTrampolineStub stub1(isolate, NOT_JS_FUNCTION_STUB_MODE); + StubFailureTrampolineStub stub2(isolate, JS_FUNCTION_STUB_MODE); + stub1.GetCode(); + stub2.GetCode(); } @@ -716,42 +697,44 @@ static void InstallDescriptor(Isolate* isolate, HydrogenCodeStub* stub) { CodeStubInterfaceDescriptor* descriptor = isolate->code_stub_interface_descriptor(major_key); if (!descriptor->initialized()) { - stub->InitializeInterfaceDescriptor(isolate, descriptor); + stub->InitializeInterfaceDescriptor(descriptor); } } void ArrayConstructorStubBase::InstallDescriptors(Isolate* isolate) { - ArrayNoArgumentConstructorStub stub1(GetInitialFastElementsKind()); + ArrayNoArgumentConstructorStub stub1(isolate, GetInitialFastElementsKind()); InstallDescriptor(isolate, &stub1); - ArraySingleArgumentConstructorStub stub2(GetInitialFastElementsKind()); + ArraySingleArgumentConstructorStub stub2(isolate, + GetInitialFastElementsKind()); InstallDescriptor(isolate, &stub2); - ArrayNArgumentsConstructorStub stub3(GetInitialFastElementsKind()); + ArrayNArgumentsConstructorStub stub3(isolate, GetInitialFastElementsKind()); InstallDescriptor(isolate, &stub3); } void NumberToStringStub::InstallDescriptors(Isolate* isolate) { - NumberToStringStub stub; + NumberToStringStub stub(isolate); InstallDescriptor(isolate, &stub); } void FastNewClosureStub::InstallDescriptors(Isolate* isolate) { - FastNewClosureStub stub(STRICT, false); + FastNewClosureStub stub(isolate, STRICT, false); InstallDescriptor(isolate, &stub); } void FastNewContextStub::InstallDescriptors(Isolate* isolate) { - FastNewContextStub stub(FastNewContextStub::kMaximumSlots); + FastNewContextStub stub(isolate, FastNewContextStub::kMaximumSlots); InstallDescriptor(isolate, &stub); } // static void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) { - FastCloneShallowArrayStub stub(FastCloneShallowArrayStub::CLONE_ELEMENTS, + FastCloneShallowArrayStub stub(isolate, + FastCloneShallowArrayStub::CLONE_ELEMENTS, DONT_TRACK_ALLOCATION_SITE, 0); InstallDescriptor(isolate, &stub); } @@ -759,40 +742,41 @@ void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) { // static void BinaryOpICStub::InstallDescriptors(Isolate* isolate) { - BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); + BinaryOpICStub stub(isolate, Token::ADD, NO_OVERWRITE); InstallDescriptor(isolate, &stub); } // static void BinaryOpWithAllocationSiteStub::InstallDescriptors(Isolate* isolate) { - BinaryOpWithAllocationSiteStub stub(Token::ADD, NO_OVERWRITE); + BinaryOpWithAllocationSiteStub stub(isolate, Token::ADD, NO_OVERWRITE); InstallDescriptor(isolate, &stub); } // static void StringAddStub::InstallDescriptors(Isolate* isolate) { - StringAddStub stub(STRING_ADD_CHECK_NONE, NOT_TENURED); + StringAddStub stub(isolate, 
STRING_ADD_CHECK_NONE, NOT_TENURED); InstallDescriptor(isolate, &stub); } // static void RegExpConstructResultStub::InstallDescriptors(Isolate* isolate) { - RegExpConstructResultStub stub; + RegExpConstructResultStub stub(isolate); InstallDescriptor(isolate, &stub); } ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate) - : argument_count_(ANY) { + : PlatformCodeStub(isolate), argument_count_(ANY) { ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); } ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate, - int argument_count) { + int argument_count) + : PlatformCodeStub(isolate) { if (argument_count == 0) { argument_count_ = NONE; } else if (argument_count == 1) { @@ -807,16 +791,16 @@ ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate, void InternalArrayConstructorStubBase::InstallDescriptors(Isolate* isolate) { - InternalArrayNoArgumentConstructorStub stub1(FAST_ELEMENTS); + InternalArrayNoArgumentConstructorStub stub1(isolate, FAST_ELEMENTS); InstallDescriptor(isolate, &stub1); - InternalArraySingleArgumentConstructorStub stub2(FAST_ELEMENTS); + InternalArraySingleArgumentConstructorStub stub2(isolate, FAST_ELEMENTS); InstallDescriptor(isolate, &stub2); - InternalArrayNArgumentsConstructorStub stub3(FAST_ELEMENTS); + InternalArrayNArgumentsConstructorStub stub3(isolate, FAST_ELEMENTS); InstallDescriptor(isolate, &stub3); } InternalArrayConstructorStub::InternalArrayConstructorStub( - Isolate* isolate) { + Isolate* isolate) : PlatformCodeStub(isolate) { InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); } diff --git a/deps/v8/src/code-stubs.h b/deps/v8/src/code-stubs.h index 5a8894233..8380266d9 100644 --- a/deps/v8/src/code-stubs.h +++ b/deps/v8/src/code-stubs.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
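The header changes that begin here in code-stubs.h apply the same treatment to the class declarations: every stub constructor gains an Isolate* parameter and forwards it to its base class, which stores the pointer behind an isolate() accessor so the former Isolate* arguments of GetCode(), GenerateCode() and friends can disappear. For readers outside the V8 tree, here is a self-contained analogue of that refactoring in plain C++; the names StubBase and DemoStub are hypothetical, and this is a sketch of the pattern rather than the real CodeStub hierarchy:

  #include <cstdio>

  // Stand-in for v8::internal::Isolate; the real type is far richer.
  struct Isolate { const char* name; };

  // Analogue of the refactored base class: the Isolate* is injected once,
  // stored, and exposed to subclasses through an accessor, so virtual
  // methods such as GenerateCode() no longer need it as a parameter.
  class StubBase {
   public:
    explicit StubBase(Isolate* isolate) : isolate_(isolate) {}
    virtual ~StubBase() {}
    virtual void GenerateCode() = 0;   // note: no Isolate* parameter
   protected:
    Isolate* isolate() const { return isolate_; }
   private:
    Isolate* isolate_;
  };

  class DemoStub : public StubBase {
   public:
    explicit DemoStub(Isolate* isolate) : StubBase(isolate) {}
    virtual void GenerateCode() {
      std::printf("generating code for isolate '%s'\n", isolate()->name);
    }
  };

  int main() {
    Isolate isolate = { "main" };
    DemoStub stub(&isolate);   // the dependency is supplied once, up front
    stub.GenerateCode();       // call sites stay parameter-free
    return 0;
  }

The trade-off is the usual one for constructor injection: call sites get shorter and the dependency cannot be forgotten, at the cost of one extra pointer per stub object.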
#ifndef V8_CODE_STUBS_H_ #define V8_CODE_STUBS_H_ @@ -51,6 +28,7 @@ namespace internal { V(CompareIC) \ V(CompareNilIC) \ V(MathPow) \ + V(CallIC) \ V(FunctionPrototype) \ V(RecordWrite) \ V(StoreBufferOverflow) \ @@ -74,7 +52,6 @@ namespace internal { V(CEntry) \ V(JSEntry) \ V(KeyedLoadElement) \ - V(ArrayPush) \ V(ArrayNoArgumentConstructor) \ V(ArraySingleArgumentConstructor) \ V(ArrayNArgumentsConstructor) \ @@ -155,11 +132,10 @@ class CodeStub BASE_EMBEDDED { }; // Retrieve the code for the stub. Generate the code if needed. - Handle<Code> GetCode(Isolate* isolate); + Handle<Code> GetCode(); // Retrieve the code for the stub, make and return a copy of the code. - Handle<Code> GetCodeCopy( - Isolate* isolate, const Code::FindAndReplacePattern& pattern); + Handle<Code> GetCodeCopy(const Code::FindAndReplacePattern& pattern); static Major MajorKeyFromKey(uint32_t key) { return static_cast<Major>(MajorKeyBits::decode(key)); @@ -175,6 +151,7 @@ class CodeStub BASE_EMBEDDED { static const char* MajorName(Major major_key, bool allow_unknown_keys); + explicit CodeStub(Isolate* isolate) : isolate_(isolate) { } virtual ~CodeStub() {} static void GenerateStubsAheadOfTime(Isolate* isolate); @@ -189,7 +166,7 @@ class CodeStub BASE_EMBEDDED { virtual bool SometimesSetsUpAFrame() { return true; } // Lookup the code in the (possibly custom) cache. - bool FindCodeInCache(Code** code_out, Isolate* isolate); + bool FindCodeInCache(Code** code_out); // Returns information for computing the number key. virtual Major MajorKey() = 0; @@ -210,13 +187,15 @@ class CodeStub BASE_EMBEDDED { // Returns a name for logging/debugging purposes. SmartArrayPointer<const char> GetName(); + Isolate* isolate() const { return isolate_; } + protected: static bool CanUseFPRegisters(); // Generates the assembler code for the stub. - virtual Handle<Code> GenerateCode(Isolate* isolate) = 0; + virtual Handle<Code> GenerateCode() = 0; - virtual void VerifyPlatformFeatures(Isolate* isolate); + virtual void VerifyPlatformFeatures(); // Returns whether the code generated for this stub needs to be allocated as // a fixed (non-moveable) code object. @@ -228,7 +207,7 @@ class CodeStub BASE_EMBEDDED { private: // Perform bookkeeping required after code generation when stub code is // initially generated. - void RecordCodeGeneration(Code* code, Isolate* isolate); + void RecordCodeGeneration(Handle<Code> code); // Finish the code object after it has been generated. virtual void FinishCode(Handle<Code> code) { } @@ -246,7 +225,7 @@ class CodeStub BASE_EMBEDDED { virtual void AddToSpecialCache(Handle<Code> new_object) { } // Find code in a specialized cache, work is delegated to the specific stub. - virtual bool FindCodeInSpecialCache(Code** code_out, Isolate* isolate) { + virtual bool FindCodeInSpecialCache(Code** code_out) { return false; } @@ -266,13 +245,17 @@ class CodeStub BASE_EMBEDDED { kStubMajorKeyBits, kStubMinorKeyBits> {}; // NOLINT friend class BreakPointIterator; + + Isolate* isolate_; }; class PlatformCodeStub : public CodeStub { public: + explicit PlatformCodeStub(Isolate* isolate) : CodeStub(isolate) { } + // Retrieve the code for the stub. Generate the code if needed. 
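The first code-stubs.h hunks above also touch CODE_STUB_LIST, the X-macro in which every stub is named exactly once (V(CallIC) is added, V(ArrayPush) removed) and which is expanded elsewhere in the header into, among other things, the Major enum returned by each stub's MajorKey(). A self-contained miniature of that idiom, using a hypothetical three-entry list rather than the real CODE_STUB_LIST:

  #include <cstdio>

  // Hypothetical miniature of the V(...) list idiom behind CODE_STUB_LIST.
  #define DEMO_STUB_LIST(V) \
    V(CallIC)               \
    V(CompareIC)            \
    V(MathPow)

  // One expansion: an enumerator per listed stub.
  enum DemoMajor {
  #define DEF_ENUM(name) k##name,
    DEMO_STUB_LIST(DEF_ENUM)
  #undef DEF_ENUM
    kDemoStubCount
  };

  // Another expansion from the same list: a printable name table.
  static const char* const kDemoNames[] = {
  #define DEF_NAME(name) #name,
    DEMO_STUB_LIST(DEF_NAME)
  #undef DEF_NAME
  };

  int main() {
    for (int i = 0; i < kDemoStubCount; ++i) {
      std::printf("%d: %s\n", i, kDemoNames[i]);
    }
    return 0;
  }

Because the list is the single source of truth, adding a stub such as CallIC is a one-line edit here plus the new class declaration further down in the header.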
- virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual Code::Kind GetCodeKind() const { return Code::STUB; } @@ -386,14 +369,15 @@ class HydrogenCodeStub : public CodeStub { INITIALIZED }; - explicit HydrogenCodeStub(InitializationState state = INITIALIZED) { + HydrogenCodeStub(Isolate* isolate, InitializationState state = INITIALIZED) + : CodeStub(isolate) { is_uninitialized_ = (state == UNINITIALIZED); } virtual Code::Kind GetCodeKind() const { return Code::STUB; } - CodeStubInterfaceDescriptor* GetInterfaceDescriptor(Isolate* isolate) { - return isolate->code_stub_interface_descriptor(MajorKey()); + CodeStubInterfaceDescriptor* GetInterfaceDescriptor() { + return isolate()->code_stub_interface_descriptor(MajorKey()); } bool IsUninitialized() { return is_uninitialized_; } @@ -405,15 +389,14 @@ class HydrogenCodeStub : public CodeStub { } virtual void InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) = 0; // Retrieve the code for the stub. Generate the code if needed. - virtual Handle<Code> GenerateCode(Isolate* isolate) = 0; + virtual Handle<Code> GenerateCode() = 0; virtual int NotMissMinorKey() = 0; - Handle<Code> GenerateLightweightMissCode(Isolate* isolate); + Handle<Code> GenerateLightweightMissCode(); template<class StateType> void TraceTransition(StateType from, StateType to); @@ -494,18 +477,16 @@ class NopRuntimeCallHelper : public RuntimeCallHelper { class ToNumberStub: public HydrogenCodeStub { public: - ToNumberStub() { } + explicit ToNumberStub(Isolate* isolate) : HydrogenCodeStub(isolate) { } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; static void InstallDescriptors(Isolate* isolate) { - ToNumberStub stub; + ToNumberStub stub(isolate); stub.InitializeInterfaceDescriptor( - isolate, isolate->code_stub_interface_descriptor(CodeStub::ToNumber)); } @@ -517,12 +498,11 @@ class ToNumberStub: public HydrogenCodeStub { class NumberToStringStub V8_FINAL : public HydrogenCodeStub { public: - NumberToStringStub() {} + explicit NumberToStringStub(Isolate* isolate) : HydrogenCodeStub(isolate) {} - virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE; + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; static void InstallDescriptors(Isolate* isolate); @@ -538,15 +518,17 @@ class NumberToStringStub V8_FINAL : public HydrogenCodeStub { class FastNewClosureStub : public HydrogenCodeStub { public: - explicit FastNewClosureStub(StrictMode strict_mode, bool is_generator) - : strict_mode_(strict_mode), - is_generator_(is_generator) { } + FastNewClosureStub(Isolate* isolate, + StrictMode strict_mode, + bool is_generator) + : HydrogenCodeStub(isolate), + strict_mode_(strict_mode), + is_generator_(is_generator) { } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; static void InstallDescriptors(Isolate* isolate); @@ -572,15 +554,15 @@ class FastNewContextStub V8_FINAL : public HydrogenCodeStub { public: static const int kMaximumSlots = 
64; - explicit FastNewContextStub(int slots) : slots_(slots) { + FastNewContextStub(Isolate* isolate, int slots) + : HydrogenCodeStub(isolate), slots_(slots) { ASSERT(slots_ > 0 && slots_ <= kMaximumSlots); } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; static void InstallDescriptors(Isolate* isolate); @@ -611,10 +593,12 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub { static const int kFastCloneModeCount = LAST_CLONE_MODE + 1; - FastCloneShallowArrayStub(Mode mode, + FastCloneShallowArrayStub(Isolate* isolate, + Mode mode, AllocationSiteMode allocation_site_mode, int length) - : mode_(mode), + : HydrogenCodeStub(isolate), + mode_(mode), allocation_site_mode_(allocation_site_mode), length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) { ASSERT_GE(length_, 0); @@ -641,11 +625,10 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub { return LAST_ELEMENTS_KIND; } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; static void InstallDescriptors(Isolate* isolate); @@ -675,18 +658,18 @@ class FastCloneShallowObjectStub : public HydrogenCodeStub { // Maximum number of properties in copied object. static const int kMaximumClonedProperties = 6; - explicit FastCloneShallowObjectStub(int length) : length_(length) { + FastCloneShallowObjectStub(Isolate* isolate, int length) + : HydrogenCodeStub(isolate), length_(length) { ASSERT_GE(length_, 0); ASSERT_LE(length_, kMaximumClonedProperties); } int length() const { return length_; } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: int length_; @@ -700,15 +683,15 @@ class FastCloneShallowObjectStub : public HydrogenCodeStub { class CreateAllocationSiteStub : public HydrogenCodeStub { public: - explicit CreateAllocationSiteStub() { } + explicit CreateAllocationSiteStub(Isolate* isolate) + : HydrogenCodeStub(isolate) { } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; static void GenerateAheadOfTime(Isolate* isolate); virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: Major MajorKey() { return CreateAllocationSite; } @@ -727,7 +710,8 @@ class InstanceofStub: public PlatformCodeStub { kReturnTrueFalseObject = 1 << 2 }; - explicit InstanceofStub(Flags flags) : flags_(flags) { } + InstanceofStub(Isolate* isolate, Flags flags) + : PlatformCodeStub(isolate), flags_(flags) { } static Register left(); static Register right(); @@ -801,8 +785,8 @@ class MathPowStub: public PlatformCodeStub { public: enum ExponentType { INTEGER, DOUBLE, TAGGED, ON_STACK }; - explicit MathPowStub(ExponentType exponent_type) - : exponent_type_(exponent_type) { } + MathPowStub(Isolate* isolate, ExponentType exponent_type) + : PlatformCodeStub(isolate), exponent_type_(exponent_type) { } virtual void Generate(MacroAssembler* 
masm); private: @@ -815,7 +799,8 @@ class MathPowStub: public PlatformCodeStub { class ICStub: public PlatformCodeStub { public: - explicit ICStub(Code::Kind kind) : kind_(kind) { } + ICStub(Isolate* isolate, Code::Kind kind) + : PlatformCodeStub(isolate), kind_(kind) { } virtual Code::Kind GetCodeKind() const { return kind_; } virtual InlineCacheState GetICState() { return MONOMORPHIC; } @@ -839,9 +824,52 @@ class ICStub: public PlatformCodeStub { }; +class CallICStub: public PlatformCodeStub { + public: + CallICStub(Isolate* isolate, const CallIC::State& state) + : PlatformCodeStub(isolate), state_(state) {} + + bool CallAsMethod() const { return state_.CallAsMethod(); } + + int arg_count() const { return state_.arg_count(); } + + static int ExtractArgcFromMinorKey(int minor_key) { + CallIC::State state((ExtraICState) minor_key); + return state.arg_count(); + } + + virtual void Generate(MacroAssembler* masm); + + virtual Code::Kind GetCodeKind() const V8_OVERRIDE { + return Code::CALL_IC; + } + + virtual InlineCacheState GetICState() V8_FINAL V8_OVERRIDE { + return state_.GetICState(); + } + + virtual ExtraICState GetExtraICState() V8_FINAL V8_OVERRIDE { + return state_.GetExtraICState(); + } + + protected: + virtual int MinorKey() { return GetExtraICState(); } + virtual void PrintState(StringStream* stream) V8_FINAL V8_OVERRIDE; + + private: + virtual CodeStub::Major MajorKey() { return CallIC; } + + // Code generation helpers. + void GenerateMiss(MacroAssembler* masm); + + CallIC::State state_; +}; + + class FunctionPrototypeStub: public ICStub { public: - explicit FunctionPrototypeStub(Code::Kind kind) : ICStub(kind) { } + FunctionPrototypeStub(Isolate* isolate, Code::Kind kind) + : ICStub(isolate, kind) { } virtual void Generate(MacroAssembler* masm); private: @@ -851,8 +879,8 @@ class FunctionPrototypeStub: public ICStub { class StoreICStub: public ICStub { public: - StoreICStub(Code::Kind kind, StrictMode strict_mode) - : ICStub(kind), strict_mode_(strict_mode) { } + StoreICStub(Isolate* isolate, Code::Kind kind, StrictMode strict_mode) + : ICStub(isolate, kind), strict_mode_(strict_mode) { } protected: virtual ExtraICState GetExtraICState() { @@ -872,6 +900,7 @@ class StoreICStub: public ICStub { class HICStub: public HydrogenCodeStub { public: + explicit HICStub(Isolate* isolate) : HydrogenCodeStub(isolate) { } virtual Code::Kind GetCodeKind() const { return kind(); } virtual InlineCacheState GetICState() { return MONOMORPHIC; } @@ -887,7 +916,7 @@ class HandlerStub: public HICStub { virtual ExtraICState GetExtraICState() { return kind(); } protected: - HandlerStub() : HICStub() { } + explicit HandlerStub(Isolate* isolate) : HICStub(isolate) { } virtual int NotMissMinorKey() { return bit_field_; } int bit_field_; }; @@ -895,15 +924,17 @@ class HandlerStub: public HICStub { class LoadFieldStub: public HandlerStub { public: - LoadFieldStub(bool inobject, int index, Representation representation) { + LoadFieldStub(Isolate* isolate, + bool inobject, + int index, Representation representation) + : HandlerStub(isolate) { Initialize(Code::LOAD_IC, inobject, index, representation); } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; Representation representation() { if (unboxed_double()) return Representation::Double(); @@ -932,7 +963,7 @@ class LoadFieldStub: public 
HandlerStub { virtual Code::StubType GetStubType() { return Code::FAST; } protected: - LoadFieldStub() : HandlerStub() { } + explicit LoadFieldStub(Isolate* isolate) : HandlerStub(isolate) { } void Initialize(Code::Kind kind, bool inobject, @@ -955,13 +986,12 @@ class LoadFieldStub: public HandlerStub { class StringLengthStub: public HandlerStub { public: - explicit StringLengthStub() : HandlerStub() { + explicit StringLengthStub(Isolate* isolate) : HandlerStub(isolate) { Initialize(Code::LOAD_IC); } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; protected: virtual Code::Kind kind() const { @@ -979,12 +1009,11 @@ class StringLengthStub: public HandlerStub { class KeyedStringLengthStub: public StringLengthStub { public: - explicit KeyedStringLengthStub() : StringLengthStub() { + explicit KeyedStringLengthStub(Isolate* isolate) : StringLengthStub(isolate) { Initialize(Code::KEYED_LOAD_IC); } virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: virtual CodeStub::Major MajorKey() { return KeyedStringLength; } @@ -993,7 +1022,8 @@ class KeyedStringLengthStub: public StringLengthStub { class StoreGlobalStub : public HandlerStub { public: - explicit StoreGlobalStub(bool is_constant, bool check_global) { + StoreGlobalStub(Isolate* isolate, bool is_constant, bool check_global) + : HandlerStub(isolate) { bit_field_ = IsConstantBits::encode(is_constant) | CheckGlobalBits::encode(check_global); } @@ -1002,29 +1032,27 @@ class StoreGlobalStub : public HandlerStub { return isolate->factory()->uninitialized_value(); } - Handle<Code> GetCodeCopyFromTemplate(Isolate* isolate, - Handle<GlobalObject> global, + Handle<Code> GetCodeCopyFromTemplate(Handle<GlobalObject> global, Handle<PropertyCell> cell) { if (check_global()) { Code::FindAndReplacePattern pattern; - pattern.Add(Handle<Map>(global_placeholder(isolate)->map()), global); - pattern.Add(isolate->factory()->meta_map(), Handle<Map>(global->map())); - pattern.Add(isolate->factory()->global_property_cell_map(), cell); - return CodeStub::GetCodeCopy(isolate, pattern); + pattern.Add(Handle<Map>(global_placeholder(isolate())->map()), global); + pattern.Add(isolate()->factory()->meta_map(), Handle<Map>(global->map())); + pattern.Add(isolate()->factory()->global_property_cell_map(), cell); + return CodeStub::GetCodeCopy(pattern); } else { Code::FindAndReplacePattern pattern; - pattern.Add(isolate->factory()->global_property_cell_map(), cell); - return CodeStub::GetCodeCopy(isolate, pattern); + pattern.Add(isolate()->factory()->global_property_cell_map(), cell); + return CodeStub::GetCodeCopy(pattern); } } virtual Code::Kind kind() const { return Code::STORE_IC; } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; bool is_constant() const { return IsConstantBits::decode(bit_field_); @@ -1056,9 +1084,10 @@ class StoreGlobalStub : public HandlerStub { class CallApiFunctionStub : public PlatformCodeStub { public: - CallApiFunctionStub(bool is_store, + CallApiFunctionStub(Isolate* isolate, + bool is_store, bool 
call_data_undefined, - int argc) { + int argc) : PlatformCodeStub(isolate) { bit_field_ = IsStoreBits::encode(is_store) | CallDataUndefinedBits::encode(call_data_undefined) | @@ -1083,7 +1112,7 @@ class CallApiFunctionStub : public PlatformCodeStub { class CallApiGetterStub : public PlatformCodeStub { public: - CallApiGetterStub() {} + explicit CallApiGetterStub(Isolate* isolate) : PlatformCodeStub(isolate) {} private: virtual void Generate(MacroAssembler* masm) V8_OVERRIDE; @@ -1096,14 +1125,15 @@ class CallApiGetterStub : public PlatformCodeStub { class KeyedLoadFieldStub: public LoadFieldStub { public: - KeyedLoadFieldStub(bool inobject, int index, Representation representation) - : LoadFieldStub() { + KeyedLoadFieldStub(Isolate* isolate, + bool inobject, + int index, Representation representation) + : LoadFieldStub(isolate) { Initialize(Code::KEYED_LOAD_IC, inobject, index, representation); } virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: virtual CodeStub::Major MajorKey() { return KeyedLoadField; } @@ -1112,15 +1142,16 @@ class KeyedLoadFieldStub: public LoadFieldStub { class BinaryOpICStub : public HydrogenCodeStub { public: - BinaryOpICStub(Token::Value op, OverwriteMode mode) - : HydrogenCodeStub(UNINITIALIZED), state_(op, mode) {} + BinaryOpICStub(Isolate* isolate, Token::Value op, OverwriteMode mode) + : HydrogenCodeStub(isolate, UNINITIALIZED), state_(isolate, op, mode) {} - explicit BinaryOpICStub(const BinaryOpIC::State& state) : state_(state) {} + BinaryOpICStub(Isolate* isolate, const BinaryOpIC::State& state) + : HydrogenCodeStub(isolate), state_(state) {} static void GenerateAheadOfTime(Isolate* isolate); virtual void InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; static void InstallDescriptors(Isolate* isolate); @@ -1136,11 +1167,11 @@ class BinaryOpICStub : public HydrogenCodeStub { return state_.GetExtraICState(); } - virtual void VerifyPlatformFeatures(Isolate* isolate) V8_FINAL V8_OVERRIDE { + virtual void VerifyPlatformFeatures() V8_FINAL V8_OVERRIDE { ASSERT(CpuFeatures::VerifyCrossCompiling(SSE2)); } - virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE; + virtual Handle<Code> GenerateCode() V8_OVERRIDE; const BinaryOpIC::State& state() const { return state_; } @@ -1165,44 +1196,20 @@ class BinaryOpICStub : public HydrogenCodeStub { }; -class ArrayPushStub: public PlatformCodeStub { - public: - ArrayPushStub(ElementsKind kind, int argc) { - bit_field_ = ElementsKindBits::encode(kind) | ArgcBits::encode(argc); - } - - void Generate(MacroAssembler* masm); - - private: - int arguments_count() { return ArgcBits::decode(bit_field_); } - ElementsKind elements_kind() { - return ElementsKindBits::decode(bit_field_); - } - - virtual CodeStub::Major MajorKey() { return ArrayPush; } - virtual int MinorKey() { return bit_field_; } - - class ElementsKindBits: public BitField<ElementsKind, 0, 3> {}; - class ArgcBits: public BitField<int, 3, Code::kArgumentsBits> {}; - - int bit_field_; -}; - - // TODO(bmeurer): Merge this into the BinaryOpICStub once we have proper tail // call support for stubs in Hydrogen. 
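The stub removed just above (ArrayPushStub) and several stubs that follow (DoubleToIStub, KeyedLoadFastElementStub, TransitionElementsKindStub) all pack their configuration into the stub's minor key with V8's BitField template. A self-contained sketch of that encode/decode idiom, with a simplified BitField and a hypothetical field layout loosely modelled on the removed ArrayPushStub:

  #include <cassert>
  #include <cstdint>

  // Simplified stand-in for v8::internal::BitField<T, shift, size>.
  template <class T, int shift, int size>
  struct BitField {
    static uint32_t encode(T value) {
      return static_cast<uint32_t>(value) << shift;
    }
    static T decode(uint32_t field) {
      return static_cast<T>((field >> shift) & ((1u << size) - 1u));
    }
  };

  enum ElementsKind { FAST_SMI_ELEMENTS, FAST_ELEMENTS, FAST_DOUBLE_ELEMENTS };

  // Hypothetical layout: 3 bits of ElementsKind, then an argument count.
  typedef BitField<ElementsKind, 0, 3> ElementsKindBits;
  typedef BitField<int, 3, 8> ArgcBits;

  int main() {
    uint32_t minor_key = ElementsKindBits::encode(FAST_ELEMENTS) |
                         ArgcBits::encode(2);
    assert(ElementsKindBits::decode(minor_key) == FAST_ELEMENTS);
    assert(ArgcBits::decode(minor_key) == 2);
    return 0;
  }

Because the whole configuration round-trips through one integer, the combined major/minor key can be used to look a stub's generated code up in the cache, as FindCodeInCache() does with GetKey() in the code-stubs.cc hunks earlier in this patch.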
class BinaryOpICWithAllocationSiteStub V8_FINAL : public PlatformCodeStub { public: - explicit BinaryOpICWithAllocationSiteStub(const BinaryOpIC::State& state) - : state_(state) {} + BinaryOpICWithAllocationSiteStub(Isolate* isolate, + const BinaryOpIC::State& state) + : PlatformCodeStub(isolate), state_(state) {} static void GenerateAheadOfTime(Isolate* isolate); - Handle<Code> GetCodeCopyFromTemplate(Isolate* isolate, - Handle<AllocationSite> allocation_site) { + Handle<Code> GetCodeCopyFromTemplate(Handle<AllocationSite> allocation_site) { Code::FindAndReplacePattern pattern; - pattern.Add(isolate->factory()->oddball_map(), allocation_site); - return CodeStub::GetCodeCopy(isolate, pattern); + pattern.Add(isolate()->factory()->undefined_map(), allocation_site); + return CodeStub::GetCodeCopy(pattern); } virtual Code::Kind GetCodeKind() const V8_OVERRIDE { @@ -1217,7 +1224,7 @@ class BinaryOpICWithAllocationSiteStub V8_FINAL : public PlatformCodeStub { return state_.GetExtraICState(); } - virtual void VerifyPlatformFeatures(Isolate* isolate) V8_OVERRIDE { + virtual void VerifyPlatformFeatures() V8_OVERRIDE { ASSERT(CpuFeatures::VerifyCrossCompiling(SSE2)); } @@ -1240,14 +1247,17 @@ class BinaryOpICWithAllocationSiteStub V8_FINAL : public PlatformCodeStub { class BinaryOpWithAllocationSiteStub V8_FINAL : public BinaryOpICStub { public: - BinaryOpWithAllocationSiteStub(Token::Value op, OverwriteMode mode) - : BinaryOpICStub(op, mode) {} + BinaryOpWithAllocationSiteStub(Isolate* isolate, + Token::Value op, + OverwriteMode mode) + : BinaryOpICStub(isolate, op, mode) {} - explicit BinaryOpWithAllocationSiteStub(const BinaryOpIC::State& state) - : BinaryOpICStub(state) {} + BinaryOpWithAllocationSiteStub(Isolate* isolate, + const BinaryOpIC::State& state) + : BinaryOpICStub(isolate, state) {} virtual void InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; static void InstallDescriptors(Isolate* isolate); @@ -1255,7 +1265,7 @@ class BinaryOpWithAllocationSiteStub V8_FINAL : public BinaryOpICStub { return Code::STUB; } - virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE; + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual Major MajorKey() V8_OVERRIDE { return BinaryOpWithAllocationSite; @@ -1282,8 +1292,11 @@ enum StringAddFlags { class StringAddStub V8_FINAL : public HydrogenCodeStub { public: - StringAddStub(StringAddFlags flags, PretenureFlag pretenure_flag) - : bit_field_(StringAddFlagsBits::encode(flags) | + StringAddStub(Isolate* isolate, + StringAddFlags flags, + PretenureFlag pretenure_flag) + : HydrogenCodeStub(isolate), + bit_field_(StringAddFlagsBits::encode(flags) | PretenureFlagBits::encode(pretenure_flag)) {} StringAddFlags flags() const { @@ -1294,14 +1307,13 @@ class StringAddStub V8_FINAL : public HydrogenCodeStub { return PretenureFlagBits::decode(bit_field_); } - virtual void VerifyPlatformFeatures(Isolate* isolate) V8_OVERRIDE { + virtual void VerifyPlatformFeatures() V8_OVERRIDE { ASSERT(CpuFeatures::VerifyCrossCompiling(SSE2)); } - virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE; + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; static void InstallDescriptors(Isolate* isolate); @@ -1326,11 +1338,13 @@ class StringAddStub V8_FINAL : public HydrogenCodeStub { class ICCompareStub: public PlatformCodeStub { public: - 
ICCompareStub(Token::Value op, + ICCompareStub(Isolate* isolate, + Token::Value op, CompareIC::State left, CompareIC::State right, CompareIC::State handler) - : op_(op), + : PlatformCodeStub(isolate), + op_(op), left_(left), right_(right), state_(handler) { @@ -1378,7 +1392,7 @@ class ICCompareStub: public PlatformCodeStub { Condition GetCondition() const { return CompareIC::ComputeCondition(op_); } virtual void AddToSpecialCache(Handle<Code> new_object); - virtual bool FindCodeInSpecialCache(Code** code_out, Isolate* isolate); + virtual bool FindCodeInSpecialCache(Code** code_out); virtual bool UseSpecialCache() { return state_ == CompareIC::KNOWN_OBJECT; } Token::Value op_; @@ -1394,28 +1408,28 @@ class CompareNilICStub : public HydrogenCodeStub { Type* GetType(Zone* zone, Handle<Map> map = Handle<Map>()); Type* GetInputType(Zone* zone, Handle<Map> map); - explicit CompareNilICStub(NilValue nil) : nil_value_(nil) { } + CompareNilICStub(Isolate* isolate, NilValue nil) + : HydrogenCodeStub(isolate), nil_value_(nil) { } - CompareNilICStub(ExtraICState ic_state, + CompareNilICStub(Isolate* isolate, + ExtraICState ic_state, InitializationState init_state = INITIALIZED) - : HydrogenCodeStub(init_state), + : HydrogenCodeStub(isolate, init_state), nil_value_(NilValueField::decode(ic_state)), state_(State(TypesField::decode(ic_state))) { } static Handle<Code> GetUninitialized(Isolate* isolate, NilValue nil) { - return CompareNilICStub(nil, UNINITIALIZED).GetCode(isolate); + return CompareNilICStub(isolate, nil, UNINITIALIZED).GetCode(); } virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; static void InstallDescriptors(Isolate* isolate) { - CompareNilICStub compare_stub(kNullValue, UNINITIALIZED); + CompareNilICStub compare_stub(isolate, kNullValue, UNINITIALIZED); compare_stub.InitializeInterfaceDescriptor( - isolate, isolate->code_stub_interface_descriptor(CodeStub::CompareNilIC)); } @@ -1431,7 +1445,7 @@ class CompareNilICStub : public HydrogenCodeStub { virtual Code::Kind GetCodeKind() const { return Code::COMPARE_NIL_IC; } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual ExtraICState GetExtraICState() { return NilValueField::encode(nil_value_) | @@ -1471,8 +1485,10 @@ class CompareNilICStub : public HydrogenCodeStub { void Print(StringStream* stream) const; }; - CompareNilICStub(NilValue nil, InitializationState init_state) - : HydrogenCodeStub(init_state), nil_value_(nil) { } + CompareNilICStub(Isolate* isolate, + NilValue nil, + InitializationState init_state) + : HydrogenCodeStub(isolate, init_state), nil_value_(nil) { } class NilValueField : public BitField<NilValue, 0, 1> {}; class TypesField : public BitField<byte, 1, NUMBER_OF_TYPES> {}; @@ -1489,9 +1505,12 @@ class CompareNilICStub : public HydrogenCodeStub { class CEntryStub : public PlatformCodeStub { public: - explicit CEntryStub(int result_size, - SaveFPRegsMode save_doubles = kDontSaveFPRegs) - : result_size_(result_size), save_doubles_(save_doubles) { } + CEntryStub(Isolate* isolate, + int result_size, + SaveFPRegsMode save_doubles = kDontSaveFPRegs) + : PlatformCodeStub(isolate), + result_size_(result_size), + save_doubles_(save_doubles) { } void Generate(MacroAssembler* masm); @@ -1502,19 +1521,12 @@ class CEntryStub : public PlatformCodeStub { static void GenerateAheadOfTime(Isolate* isolate); protected: - virtual void VerifyPlatformFeatures(Isolate* 
isolate) V8_OVERRIDE { + virtual void VerifyPlatformFeatures() V8_OVERRIDE { ASSERT(CpuFeatures::VerifyCrossCompiling(SSE2)); }; private: - void GenerateCore(MacroAssembler* masm, - Label* throw_normal_exception, - Label* throw_termination_exception, - bool do_gc, - bool always_allocate_scope); - // Number of pointers/values returned. - Isolate* isolate_; const int result_size_; SaveFPRegsMode save_doubles_; @@ -1527,7 +1539,7 @@ class CEntryStub : public PlatformCodeStub { class JSEntryStub : public PlatformCodeStub { public: - JSEntryStub() { } + explicit JSEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) { } void Generate(MacroAssembler* masm) { GenerateBody(masm, false); } @@ -1546,7 +1558,7 @@ class JSEntryStub : public PlatformCodeStub { class JSConstructEntryStub : public JSEntryStub { public: - JSConstructEntryStub() { } + explicit JSConstructEntryStub(Isolate* isolate) : JSEntryStub(isolate) { } void Generate(MacroAssembler* masm) { GenerateBody(masm, true); } @@ -1568,7 +1580,8 @@ class ArgumentsAccessStub: public PlatformCodeStub { NEW_STRICT }; - explicit ArgumentsAccessStub(Type type) : type_(type) { } + ArgumentsAccessStub(Isolate* isolate, Type type) + : PlatformCodeStub(isolate), type_(type) { } private: Type type_; @@ -1588,7 +1601,7 @@ class ArgumentsAccessStub: public PlatformCodeStub { class RegExpExecStub: public PlatformCodeStub { public: - RegExpExecStub() { } + explicit RegExpExecStub(Isolate* isolate) : PlatformCodeStub(isolate) { } private: Major MajorKey() { return RegExpExec; } @@ -1600,12 +1613,12 @@ class RegExpExecStub: public PlatformCodeStub { class RegExpConstructResultStub V8_FINAL : public HydrogenCodeStub { public: - RegExpConstructResultStub() { } + explicit RegExpConstructResultStub(Isolate* isolate) + : HydrogenCodeStub(isolate) { } - virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE; + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; virtual Major MajorKey() V8_OVERRIDE { return RegExpConstructResult; } @@ -1625,15 +1638,11 @@ class RegExpConstructResultStub V8_FINAL : public HydrogenCodeStub { class CallFunctionStub: public PlatformCodeStub { public: - CallFunctionStub(int argc, CallFunctionFlags flags) - : argc_(argc), flags_(flags) { } + CallFunctionStub(Isolate* isolate, int argc, CallFunctionFlags flags) + : PlatformCodeStub(isolate), argc_(argc), flags_(flags) { } void Generate(MacroAssembler* masm); - virtual void FinishCode(Handle<Code> code) { - code->set_has_function_cache(RecordCallTarget()); - } - static int ExtractArgcFromMinorKey(int minor_key) { return ArgcBits::decode(minor_key); } @@ -1654,10 +1663,6 @@ class CallFunctionStub: public PlatformCodeStub { return FlagBits::encode(flags_) | ArgcBits::encode(argc_); } - bool RecordCallTarget() { - return flags_ == RECORD_CALL_TARGET; - } - bool CallAsMethod() { return flags_ == CALL_AS_METHOD || flags_ == WRAP_AND_CALL; } @@ -1670,7 +1675,8 @@ class CallFunctionStub: public PlatformCodeStub { class CallConstructStub: public PlatformCodeStub { public: - explicit CallConstructStub(CallFunctionFlags flags) : flags_(flags) {} + CallConstructStub(Isolate* isolate, CallConstructorFlags flags) + : PlatformCodeStub(isolate), flags_(flags) {} void Generate(MacroAssembler* masm); @@ -1679,7 +1685,7 @@ class CallConstructStub: public PlatformCodeStub { } private: - CallFunctionFlags flags_; + CallConstructorFlags flags_; virtual void PrintName(StringStream* 
stream); @@ -1687,11 +1693,7 @@ class CallConstructStub: public PlatformCodeStub { int MinorKey() { return flags_; } bool RecordCallTarget() { - return (flags_ & RECORD_CALL_TARGET) != 0; - } - - bool CallAsMethod() { - return (flags_ & CALL_AS_METHOD) != 0; + return (flags_ & RECORD_CONSTRUCTOR_TARGET) != 0; } }; @@ -1872,12 +1874,12 @@ class StringCharAtGenerator { class KeyedLoadDictionaryElementStub : public HydrogenCodeStub { public: - KeyedLoadDictionaryElementStub() {} + explicit KeyedLoadDictionaryElementStub(Isolate* isolate) + : HydrogenCodeStub(isolate) {} - virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE; + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: @@ -1890,7 +1892,8 @@ class KeyedLoadDictionaryElementStub : public HydrogenCodeStub { class KeyedLoadDictionaryElementPlatformStub : public PlatformCodeStub { public: - KeyedLoadDictionaryElementPlatformStub() {} + explicit KeyedLoadDictionaryElementPlatformStub(Isolate* isolate) + : PlatformCodeStub(isolate) {} void Generate(MacroAssembler* masm); @@ -1904,18 +1907,21 @@ class KeyedLoadDictionaryElementPlatformStub : public PlatformCodeStub { class DoubleToIStub : public PlatformCodeStub { public: - DoubleToIStub(Register source, + DoubleToIStub(Isolate* isolate, + Register source, Register destination, int offset, bool is_truncating, - bool skip_fastpath = false) : bit_field_(0) { + bool skip_fastpath = false) + : PlatformCodeStub(isolate), bit_field_(0) { bit_field_ = SourceRegisterBits::encode(source.code()) | DestinationRegisterBits::encode(destination.code()) | OffsetBits::encode(offset) | IsTruncatingBits::encode(is_truncating) | SkipFastPathBits::encode(skip_fastpath) | - SSEBits::encode(CpuFeatures::IsSafeForSnapshot(SSE2) ? - CpuFeatures::IsSafeForSnapshot(SSE3) ? 2 : 1 : 0); + SSEBits::encode( + CpuFeatures::IsSafeForSnapshot(isolate, SSE2) ? + CpuFeatures::IsSafeForSnapshot(isolate, SSE3) ? 
2 : 1 : 0); } Register source() { @@ -1943,7 +1949,7 @@ class DoubleToIStub : public PlatformCodeStub { virtual bool SometimesSetsUpAFrame() { return false; } protected: - virtual void VerifyPlatformFeatures(Isolate* isolate) V8_OVERRIDE { + virtual void VerifyPlatformFeatures() V8_OVERRIDE { ASSERT(CpuFeatures::VerifyCrossCompiling(SSE2)); } @@ -1975,7 +1981,10 @@ class DoubleToIStub : public PlatformCodeStub { class KeyedLoadFastElementStub : public HydrogenCodeStub { public: - KeyedLoadFastElementStub(bool is_js_array, ElementsKind elements_kind) { + KeyedLoadFastElementStub(Isolate* isolate, + bool is_js_array, + ElementsKind elements_kind) + : HydrogenCodeStub(isolate) { bit_field_ = ElementsKindBits::encode(elements_kind) | IsJSArrayBits::encode(is_js_array); } @@ -1988,11 +1997,10 @@ class KeyedLoadFastElementStub : public HydrogenCodeStub { return ElementsKindBits::decode(bit_field_); } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: class ElementsKindBits: public BitField<ElementsKind, 0, 8> {}; @@ -2008,9 +2016,11 @@ class KeyedLoadFastElementStub : public HydrogenCodeStub { class KeyedStoreFastElementStub : public HydrogenCodeStub { public: - KeyedStoreFastElementStub(bool is_js_array, + KeyedStoreFastElementStub(Isolate* isolate, + bool is_js_array, ElementsKind elements_kind, - KeyedAccessStoreMode mode) { + KeyedAccessStoreMode mode) + : HydrogenCodeStub(isolate) { bit_field_ = ElementsKindBits::encode(elements_kind) | IsJSArrayBits::encode(is_js_array) | StoreModeBits::encode(mode); @@ -2028,11 +2038,10 @@ class KeyedStoreFastElementStub : public HydrogenCodeStub { return StoreModeBits::decode(bit_field_); } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: class ElementsKindBits: public BitField<ElementsKind, 0, 8> {}; @@ -2049,9 +2058,10 @@ class KeyedStoreFastElementStub : public HydrogenCodeStub { class TransitionElementsKindStub : public HydrogenCodeStub { public: - TransitionElementsKindStub(ElementsKind from_kind, + TransitionElementsKindStub(Isolate* isolate, + ElementsKind from_kind, ElementsKind to_kind, - bool is_js_array) { + bool is_js_array) : HydrogenCodeStub(isolate) { bit_field_ = FromKindBits::encode(from_kind) | ToKindBits::encode(to_kind) | IsJSArrayBits::encode(is_js_array); @@ -2069,11 +2079,10 @@ class TransitionElementsKindStub : public HydrogenCodeStub { return IsJSArrayBits::decode(bit_field_); } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: class FromKindBits: public BitField<ElementsKind, 8, 8> {}; @@ -2090,8 +2099,10 @@ class TransitionElementsKindStub : public HydrogenCodeStub { class ArrayConstructorStubBase : public HydrogenCodeStub { public: - ArrayConstructorStubBase(ElementsKind kind, - AllocationSiteOverrideMode override_mode) { + ArrayConstructorStubBase(Isolate* isolate, + ElementsKind kind, + AllocationSiteOverrideMode override_mode) + : 
HydrogenCodeStub(isolate) { // It only makes sense to override local allocation site behavior // if there is a difference between the global allocation site policy // for an ElementsKind and the desired usage of the stub. @@ -2137,16 +2148,16 @@ class ArrayConstructorStubBase : public HydrogenCodeStub { class ArrayNoArgumentConstructorStub : public ArrayConstructorStubBase { public: ArrayNoArgumentConstructorStub( + Isolate* isolate, ElementsKind kind, AllocationSiteOverrideMode override_mode = DONT_OVERRIDE) - : ArrayConstructorStubBase(kind, override_mode) { + : ArrayConstructorStubBase(isolate, kind, override_mode) { } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: Major MajorKey() { return ArrayNoArgumentConstructor; } @@ -2162,16 +2173,16 @@ class ArrayNoArgumentConstructorStub : public ArrayConstructorStubBase { class ArraySingleArgumentConstructorStub : public ArrayConstructorStubBase { public: ArraySingleArgumentConstructorStub( + Isolate* isolate, ElementsKind kind, AllocationSiteOverrideMode override_mode = DONT_OVERRIDE) - : ArrayConstructorStubBase(kind, override_mode) { + : ArrayConstructorStubBase(isolate, kind, override_mode) { } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: Major MajorKey() { return ArraySingleArgumentConstructor; } @@ -2187,16 +2198,16 @@ class ArraySingleArgumentConstructorStub : public ArrayConstructorStubBase { class ArrayNArgumentsConstructorStub : public ArrayConstructorStubBase { public: ArrayNArgumentsConstructorStub( + Isolate* isolate, ElementsKind kind, AllocationSiteOverrideMode override_mode = DONT_OVERRIDE) - : ArrayConstructorStubBase(kind, override_mode) { + : ArrayConstructorStubBase(isolate, kind, override_mode) { } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: Major MajorKey() { return ArrayNArgumentsConstructor; } @@ -2211,7 +2222,8 @@ class ArrayNArgumentsConstructorStub : public ArrayConstructorStubBase { class InternalArrayConstructorStubBase : public HydrogenCodeStub { public: - explicit InternalArrayConstructorStubBase(ElementsKind kind) { + InternalArrayConstructorStubBase(Isolate* isolate, ElementsKind kind) + : HydrogenCodeStub(isolate) { kind_ = kind; } @@ -2235,14 +2247,14 @@ class InternalArrayConstructorStubBase : public HydrogenCodeStub { class InternalArrayNoArgumentConstructorStub : public InternalArrayConstructorStubBase { public: - explicit InternalArrayNoArgumentConstructorStub(ElementsKind kind) - : InternalArrayConstructorStubBase(kind) { } + InternalArrayNoArgumentConstructorStub(Isolate* isolate, + ElementsKind kind) + : InternalArrayConstructorStubBase(isolate, kind) { } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) 
V8_OVERRIDE; private: Major MajorKey() { return InternalArrayNoArgumentConstructor; } @@ -2254,14 +2266,14 @@ class InternalArrayNoArgumentConstructorStub : public class InternalArraySingleArgumentConstructorStub : public InternalArrayConstructorStubBase { public: - explicit InternalArraySingleArgumentConstructorStub(ElementsKind kind) - : InternalArrayConstructorStubBase(kind) { } + InternalArraySingleArgumentConstructorStub(Isolate* isolate, + ElementsKind kind) + : InternalArrayConstructorStubBase(isolate, kind) { } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: Major MajorKey() { return InternalArraySingleArgumentConstructor; } @@ -2273,14 +2285,13 @@ class InternalArraySingleArgumentConstructorStub : public class InternalArrayNArgumentsConstructorStub : public InternalArrayConstructorStubBase { public: - explicit InternalArrayNArgumentsConstructorStub(ElementsKind kind) - : InternalArrayConstructorStubBase(kind) { } + InternalArrayNArgumentsConstructorStub(Isolate* isolate, ElementsKind kind) + : InternalArrayConstructorStubBase(isolate, kind) { } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: Major MajorKey() { return InternalArrayNArgumentsConstructor; } @@ -2291,10 +2302,12 @@ class InternalArrayNArgumentsConstructorStub : public class KeyedStoreElementStub : public PlatformCodeStub { public: - KeyedStoreElementStub(bool is_js_array, + KeyedStoreElementStub(Isolate* isolate, + bool is_js_array, ElementsKind elements_kind, KeyedAccessStoreMode store_mode) - : is_js_array_(is_js_array), + : PlatformCodeStub(isolate), + is_js_array_(is_js_array), elements_kind_(elements_kind), store_mode_(store_mode), fp_registers_(CanUseFPRegisters()) { } @@ -2357,18 +2370,17 @@ class ToBooleanStub: public HydrogenCodeStub { static Types Generic() { return Types((1 << NUMBER_OF_TYPES) - 1); } }; - explicit ToBooleanStub(Types types = Types()) - : types_(types) { } - explicit ToBooleanStub(ExtraICState state) - : types_(static_cast<byte>(state)) { } + ToBooleanStub(Isolate* isolate, Types types = Types()) + : HydrogenCodeStub(isolate), types_(types) { } + ToBooleanStub(Isolate* isolate, ExtraICState state) + : HydrogenCodeStub(isolate), types_(static_cast<byte>(state)) { } bool UpdateStatus(Handle<Object> object); Types GetTypes() { return types_; } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; virtual Code::Kind GetCodeKind() const { return Code::TO_BOOLEAN_IC; } virtual void PrintState(StringStream* stream); @@ -2376,14 +2388,13 @@ class ToBooleanStub: public HydrogenCodeStub { virtual bool SometimesSetsUpAFrame() { return false; } static void InstallDescriptors(Isolate* isolate) { - ToBooleanStub stub; + ToBooleanStub stub(isolate); stub.InitializeInterfaceDescriptor( - isolate, isolate->code_stub_interface_descriptor(CodeStub::ToBoolean)); } static Handle<Code> GetUninitialized(Isolate* isolate) { - return 
ToBooleanStub(UNINITIALIZED).GetCode(isolate); + return ToBooleanStub(isolate, UNINITIALIZED).GetCode(); } virtual ExtraICState GetExtraICState() { @@ -2402,8 +2413,8 @@ class ToBooleanStub: public HydrogenCodeStub { Major MajorKey() { return ToBoolean; } int NotMissMinorKey() { return GetExtraICState(); } - explicit ToBooleanStub(InitializationState init_state) : - HydrogenCodeStub(init_state) {} + ToBooleanStub(Isolate* isolate, InitializationState init_state) : + HydrogenCodeStub(isolate, init_state) {} Types types_; }; @@ -2411,11 +2422,13 @@ class ToBooleanStub: public HydrogenCodeStub { class ElementsTransitionAndStoreStub : public HydrogenCodeStub { public: - ElementsTransitionAndStoreStub(ElementsKind from_kind, + ElementsTransitionAndStoreStub(Isolate* isolate, + ElementsKind from_kind, ElementsKind to_kind, bool is_jsarray, KeyedAccessStoreMode store_mode) - : from_kind_(from_kind), + : HydrogenCodeStub(isolate), + from_kind_(from_kind), to_kind_(to_kind), is_jsarray_(is_jsarray), store_mode_(store_mode) {} @@ -2425,11 +2438,10 @@ class ElementsTransitionAndStoreStub : public HydrogenCodeStub { bool is_jsarray() const { return is_jsarray_; } KeyedAccessStoreMode store_mode() const { return store_mode_; } - virtual Handle<Code> GenerateCode(Isolate* isolate); + virtual Handle<Code> GenerateCode() V8_OVERRIDE; - void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); + virtual void InitializeInterfaceDescriptor( + CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; private: class FromBits: public BitField<ElementsKind, 0, 8> {}; @@ -2456,8 +2468,8 @@ class ElementsTransitionAndStoreStub : public HydrogenCodeStub { class StoreArrayLiteralElementStub : public PlatformCodeStub { public: - StoreArrayLiteralElementStub() - : fp_registers_(CanUseFPRegisters()) { } + explicit StoreArrayLiteralElementStub(Isolate* isolate) + : PlatformCodeStub(isolate), fp_registers_(CanUseFPRegisters()) { } private: class FPRegisters: public BitField<bool, 0, 1> {}; @@ -2475,8 +2487,10 @@ class StoreArrayLiteralElementStub : public PlatformCodeStub { class StubFailureTrampolineStub : public PlatformCodeStub { public: - explicit StubFailureTrampolineStub(StubFunctionMode function_mode) - : fp_registers_(CanUseFPRegisters()), function_mode_(function_mode) {} + StubFailureTrampolineStub(Isolate* isolate, StubFunctionMode function_mode) + : PlatformCodeStub(isolate), + fp_registers_(CanUseFPRegisters()), + function_mode_(function_mode) {} static void GenerateAheadOfTime(Isolate* isolate); @@ -2501,7 +2515,7 @@ class StubFailureTrampolineStub : public PlatformCodeStub { class ProfileEntryHookStub : public PlatformCodeStub { public: - explicit ProfileEntryHookStub() {} + explicit ProfileEntryHookStub(Isolate* isolate) : PlatformCodeStub(isolate) {} // The profile entry hook function is not allowed to cause a GC. virtual bool SometimesSetsUpAFrame() { return false; } diff --git a/deps/v8/src/code.h b/deps/v8/src/code.h index 791420cf3..40a6950d2 100644 --- a/deps/v8/src/code.h +++ b/deps/v8/src/code.h @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
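The stub changes above all follow one shape: the Isolate is handed to the stub constructor, and GenerateCode(), InitializeInterfaceDescriptor() and GetCode() drop their Isolate parameter. An illustrative sketch of the resulting call site, not taken from the patch, assuming an Isolate* named isolate is in scope:

    ToBooleanStub stub(isolate);              // the stub captures its isolate at construction
    Handle<Code> code = stub.GetCode();       // GetCode() no longer takes an Isolate* argument
    ProfileEntryHookStub hook_stub(isolate);  // same pattern for PlatformCodeStub subclasses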
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_CODE_H_ #define V8_CODE_H_ diff --git a/deps/v8/src/codegen.cc b/deps/v8/src/codegen.cc index ea0ead310..9da4ea21e 100644 --- a/deps/v8/src/codegen.cc +++ b/deps/v8/src/codegen.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -40,6 +17,65 @@ namespace v8 { namespace internal { + +#if defined(_WIN64) +typedef double (*ModuloFunction)(double, double); +static ModuloFunction modulo_function = NULL; +// Defined in codegen-x64.cc. 
+ModuloFunction CreateModuloFunction(); + +void init_modulo_function() { + modulo_function = CreateModuloFunction(); +} + + +double modulo(double x, double y) { + // Note: here we rely on dependent reads being ordered. This is true + // on all architectures we currently support. + return (*modulo_function)(x, y); +} +#elif defined(_WIN32) + +double modulo(double x, double y) { + // Workaround MS fmod bugs. ECMA-262 says: + // dividend is finite and divisor is an infinity => result equals dividend + // dividend is a zero and divisor is nonzero finite => result equals dividend + if (!(std::isfinite(x) && (!std::isfinite(y) && !std::isnan(y))) && + !(x == 0 && (y != 0 && std::isfinite(y)))) { + x = fmod(x, y); + } + return x; +} +#else // POSIX + +double modulo(double x, double y) { + return std::fmod(x, y); +} +#endif // defined(_WIN64) + + +#define UNARY_MATH_FUNCTION(name, generator) \ +static UnaryMathFunction fast_##name##_function = NULL; \ +void init_fast_##name##_function() { \ + fast_##name##_function = generator; \ +} \ +double fast_##name(double x) { \ + return (*fast_##name##_function)(x); \ +} + +UNARY_MATH_FUNCTION(exp, CreateExpFunction()) +UNARY_MATH_FUNCTION(sqrt, CreateSqrtFunction()) + +#undef UNARY_MATH_FUNCTION + + +void lazily_initialize_fast_exp() { + if (fast_exp_function == NULL) { + init_fast_exp_function(); + } +} + + #define __ ACCESS_MASM(masm_) #ifdef DEBUG @@ -188,21 +224,6 @@ void CodeGenerator::PrintCode(Handle<Code> code, CompilationInfo* info) { } -bool CodeGenerator::ShouldGenerateLog(Isolate* isolate, Expression* type) { - ASSERT(type != NULL); - if (!isolate->logger()->is_logging() && - !isolate->cpu_profiler()->is_profiling()) { - return false; - } - Handle<String> name = Handle<String>::cast(type->AsLiteral()->value()); - if (FLAG_log_regexp) { - if (name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("regexp"))) - return true; - } - return false; -} - - bool CodeGenerator::RecordPositions(MacroAssembler* masm, int pos, bool right_here) { diff --git a/deps/v8/src/codegen.h b/deps/v8/src/codegen.h index 6b5f9513e..fbaee97c8 100644 --- a/deps/v8/src/codegen.h +++ b/deps/v8/src/codegen.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_CODEGEN_H_ #define V8_CODEGEN_H_ @@ -102,8 +79,6 @@ class CodeGenerator { // Print the code after compiling it. static void PrintCode(Handle<Code> code, CompilationInfo* info); - static bool ShouldGenerateLog(Isolate* isolate, Expression* type); - static bool RecordPositions(MacroAssembler* masm, int pos, bool right_here = false); @@ -122,6 +97,18 @@ UnaryMathFunction CreateExpFunction(); UnaryMathFunction CreateSqrtFunction(); +double modulo(double x, double y); + +// Custom implementation of math functions. +double fast_exp(double input); +double fast_sqrt(double input); +#ifdef _WIN64 +void init_modulo_function(); +#endif +void lazily_initialize_fast_exp(); +void init_fast_sqrt_function(); + + class ElementsTransitionGenerator : public AllStatic { public: // If |mode| is set to DONT_TRACK_ALLOCATION_SITE, @@ -143,6 +130,33 @@ class ElementsTransitionGenerator : public AllStatic { static const int kNumberDictionaryProbes = 4; +class CodeAgingHelper { + public: + CodeAgingHelper(); + + uint32_t young_sequence_length() const { return young_sequence_.length(); } + bool IsYoung(byte* candidate) const { + return memcmp(candidate, + young_sequence_.start(), + young_sequence_.length()) == 0; + } + void CopyYoungSequenceTo(byte* new_buffer) const { + CopyBytes(new_buffer, young_sequence_.start(), young_sequence_.length()); + } + +#ifdef DEBUG + bool IsOld(byte* candidate) const; +#endif + + protected: + const EmbeddedVector<byte, kNoCodeAgeSequenceLength> young_sequence_; +#ifdef DEBUG +#ifdef V8_TARGET_ARCH_ARM64 + const EmbeddedVector<byte, kNoCodeAgeSequenceLength> old_sequence_; +#endif +#endif +}; + } } // namespace v8::internal #endif // V8_CODEGEN_H_ diff --git a/deps/v8/src/collection.js b/deps/v8/src/collection.js index 9054187a1..f8f3fa995 100644 --- a/deps/v8/src/collection.js +++ b/deps/v8/src/collection.js @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. "use strict"; @@ -113,8 +90,29 @@ function SetClear() { throw MakeTypeError('incompatible_method_receiver', ['Set.prototype.clear', this]); } - // Replace the internal table with a new empty table. - %SetInitialize(this); + %SetClear(this); +} + + +function SetForEach(f, receiver) { + if (!IS_SET(this)) { + throw MakeTypeError('incompatible_method_receiver', + ['Set.prototype.forEach', this]); + } + + if (!IS_SPEC_FUNCTION(f)) { + throw MakeTypeError('called_non_callable', [f]); + } + + var iterator = %SetCreateIterator(this, ITERATOR_KIND_VALUES); + var entry; + try { + while (!(entry = %SetIteratorNext(iterator)).done) { + %_CallFunction(receiver, entry.value, entry.value, this, f); + } + } finally { + %SetIteratorClose(iterator); + } } @@ -127,13 +125,16 @@ function SetUpSet() { %FunctionSetPrototype($Set, new $Object()); %SetProperty($Set.prototype, "constructor", $Set, DONT_ENUM); + %FunctionSetLength(SetForEach, 1); + // Set up the non-enumerable functions on the Set prototype object. InstallGetter($Set.prototype, "size", SetGetSize); InstallFunctions($Set.prototype, DONT_ENUM, $Array( "add", SetAdd, "has", SetHas, "delete", SetDelete, - "clear", SetClear + "clear", SetClear, + "forEach", SetForEach )); } @@ -202,8 +203,29 @@ function MapClear() { throw MakeTypeError('incompatible_method_receiver', ['Map.prototype.clear', this]); } - // Replace the internal table with a new empty table. - %MapInitialize(this); + %MapClear(this); +} + + +function MapForEach(f, receiver) { + if (!IS_MAP(this)) { + throw MakeTypeError('incompatible_method_receiver', + ['Map.prototype.forEach', this]); + } + + if (!IS_SPEC_FUNCTION(f)) { + throw MakeTypeError('called_non_callable', [f]); + } + + var iterator = %MapCreateIterator(this, ITERATOR_KIND_ENTRIES); + var entry; + try { + while (!(entry = %MapIteratorNext(iterator)).done) { + %_CallFunction(receiver, entry.value[1], entry.value[0], this, f); + } + } finally { + %MapIteratorClose(iterator); + } } @@ -216,6 +238,8 @@ function SetUpMap() { %FunctionSetPrototype($Map, new $Object()); %SetProperty($Map.prototype, "constructor", $Map, DONT_ENUM); + %FunctionSetLength(MapForEach, 1); + // Set up the non-enumerable functions on the Map prototype object. InstallGetter($Map.prototype, "size", MapGetSize); InstallFunctions($Map.prototype, DONT_ENUM, $Array( @@ -223,7 +247,8 @@ function SetUpMap() { "set", MapSet, "has", MapHas, "delete", MapDelete, - "clear", MapClear + "clear", MapClear, + "forEach", MapForEach )); } diff --git a/deps/v8/src/compilation-cache.cc b/deps/v8/src/compilation-cache.cc index 54d4565e2..42a94fc74 100644 --- a/deps/v8/src/compilation-cache.cc +++ b/deps/v8/src/compilation-cache.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -65,18 +42,11 @@ CompilationCache::CompilationCache(Isolate* isolate) CompilationCache::~CompilationCache() {} -static Handle<CompilationCacheTable> AllocateTable(Isolate* isolate, int size) { - CALL_HEAP_FUNCTION(isolate, - CompilationCacheTable::Allocate(isolate->heap(), size), - CompilationCacheTable); -} - - Handle<CompilationCacheTable> CompilationSubCache::GetTable(int generation) { ASSERT(generation < generations_); Handle<CompilationCacheTable> result; if (tables_[generation]->IsUndefined()) { - result = AllocateTable(isolate(), kInitialCacheSize); + result = CompilationCacheTable::New(isolate(), kInitialCacheSize); tables_[generation] = *result; } else { CompilationCacheTable* table = @@ -161,7 +131,8 @@ bool CompilationCacheScript::HasOrigin( // Were both scripts tagged by the embedder as being shared cross-origin? if (is_shared_cross_origin != script->is_shared_cross_origin()) return false; // Compare the two name strings for equality. 
- return String::cast(*name)->Equals(String::cast(script->name())); + return String::Equals(Handle<String>::cast(name), + Handle<String>(String::cast(script->name()))); } @@ -184,7 +155,7 @@ Handle<SharedFunctionInfo> CompilationCacheScript::Lookup( { HandleScope scope(isolate()); for (generation = 0; generation < generations(); generation++) { Handle<CompilationCacheTable> table = GetTable(generation); - Handle<Object> probe(table->Lookup(*source, *context), isolate()); + Handle<Object> probe = table->Lookup(source, context); if (probe->IsSharedFunctionInfo()) { Handle<SharedFunctionInfo> function_info = Handle<SharedFunctionInfo>::cast(probe); @@ -239,153 +210,93 @@ Handle<SharedFunctionInfo> CompilationCacheScript::Lookup( } -MaybeObject* CompilationCacheScript::TryTablePut( - Handle<String> source, - Handle<Context> context, - Handle<SharedFunctionInfo> function_info) { - Handle<CompilationCacheTable> table = GetFirstTable(); - return table->Put(*source, *context, *function_info); -} - - -Handle<CompilationCacheTable> CompilationCacheScript::TablePut( - Handle<String> source, - Handle<Context> context, - Handle<SharedFunctionInfo> function_info) { - CALL_HEAP_FUNCTION(isolate(), - TryTablePut(source, context, function_info), - CompilationCacheTable); -} - - void CompilationCacheScript::Put(Handle<String> source, Handle<Context> context, Handle<SharedFunctionInfo> function_info) { HandleScope scope(isolate()); - SetFirstTable(TablePut(source, context, function_info)); + Handle<CompilationCacheTable> table = GetFirstTable(); + SetFirstTable( + CompilationCacheTable::Put(table, source, context, function_info)); } -Handle<SharedFunctionInfo> CompilationCacheEval::Lookup( +MaybeHandle<SharedFunctionInfo> CompilationCacheEval::Lookup( Handle<String> source, Handle<Context> context, StrictMode strict_mode, int scope_position) { + HandleScope scope(isolate()); // Make sure not to leak the table into the surrounding handle // scope. Otherwise, we risk keeping old tables around even after // having cleared the cache. 
- Object* result = NULL; + Handle<Object> result = isolate()->factory()->undefined_value(); int generation; - { HandleScope scope(isolate()); - for (generation = 0; generation < generations(); generation++) { - Handle<CompilationCacheTable> table = GetTable(generation); - result = table->LookupEval( - *source, *context, strict_mode, scope_position); - if (result->IsSharedFunctionInfo()) { - break; - } - } + for (generation = 0; generation < generations(); generation++) { + Handle<CompilationCacheTable> table = GetTable(generation); + result = table->LookupEval(source, context, strict_mode, scope_position); + if (result->IsSharedFunctionInfo()) break; } if (result->IsSharedFunctionInfo()) { - Handle<SharedFunctionInfo> - function_info(SharedFunctionInfo::cast(result), isolate()); + Handle<SharedFunctionInfo> function_info = + Handle<SharedFunctionInfo>::cast(result); if (generation != 0) { Put(source, context, function_info, scope_position); } isolate()->counters()->compilation_cache_hits()->Increment(); - return function_info; + return scope.CloseAndEscape(function_info); } else { isolate()->counters()->compilation_cache_misses()->Increment(); - return Handle<SharedFunctionInfo>::null(); + return MaybeHandle<SharedFunctionInfo>(); } } -MaybeObject* CompilationCacheEval::TryTablePut( - Handle<String> source, - Handle<Context> context, - Handle<SharedFunctionInfo> function_info, - int scope_position) { - Handle<CompilationCacheTable> table = GetFirstTable(); - return table->PutEval(*source, *context, *function_info, scope_position); -} - - -Handle<CompilationCacheTable> CompilationCacheEval::TablePut( - Handle<String> source, - Handle<Context> context, - Handle<SharedFunctionInfo> function_info, - int scope_position) { - CALL_HEAP_FUNCTION(isolate(), - TryTablePut( - source, context, function_info, scope_position), - CompilationCacheTable); -} - - void CompilationCacheEval::Put(Handle<String> source, Handle<Context> context, Handle<SharedFunctionInfo> function_info, int scope_position) { HandleScope scope(isolate()); - SetFirstTable(TablePut(source, context, function_info, scope_position)); + Handle<CompilationCacheTable> table = GetFirstTable(); + table = CompilationCacheTable::PutEval(table, source, context, + function_info, scope_position); + SetFirstTable(table); } -Handle<FixedArray> CompilationCacheRegExp::Lookup(Handle<String> source, - JSRegExp::Flags flags) { +MaybeHandle<FixedArray> CompilationCacheRegExp::Lookup( + Handle<String> source, + JSRegExp::Flags flags) { + HandleScope scope(isolate()); // Make sure not to leak the table into the surrounding handle // scope. Otherwise, we risk keeping old tables around even after // having cleared the cache. 
- Object* result = NULL; + Handle<Object> result = isolate()->factory()->undefined_value(); int generation; - { HandleScope scope(isolate()); - for (generation = 0; generation < generations(); generation++) { - Handle<CompilationCacheTable> table = GetTable(generation); - result = table->LookupRegExp(*source, flags); - if (result->IsFixedArray()) { - break; - } - } + for (generation = 0; generation < generations(); generation++) { + Handle<CompilationCacheTable> table = GetTable(generation); + result = table->LookupRegExp(source, flags); + if (result->IsFixedArray()) break; } if (result->IsFixedArray()) { - Handle<FixedArray> data(FixedArray::cast(result), isolate()); + Handle<FixedArray> data = Handle<FixedArray>::cast(result); if (generation != 0) { Put(source, flags, data); } isolate()->counters()->compilation_cache_hits()->Increment(); - return data; + return scope.CloseAndEscape(data); } else { isolate()->counters()->compilation_cache_misses()->Increment(); - return Handle<FixedArray>::null(); + return MaybeHandle<FixedArray>(); } } -MaybeObject* CompilationCacheRegExp::TryTablePut( - Handle<String> source, - JSRegExp::Flags flags, - Handle<FixedArray> data) { - Handle<CompilationCacheTable> table = GetFirstTable(); - return table->PutRegExp(*source, flags, *data); -} - - -Handle<CompilationCacheTable> CompilationCacheRegExp::TablePut( - Handle<String> source, - JSRegExp::Flags flags, - Handle<FixedArray> data) { - CALL_HEAP_FUNCTION(isolate(), - TryTablePut(source, flags, data), - CompilationCacheTable); -} - - void CompilationCacheRegExp::Put(Handle<String> source, JSRegExp::Flags flags, Handle<FixedArray> data) { HandleScope scope(isolate()); - SetFirstTable(TablePut(source, flags, data)); + Handle<CompilationCacheTable> table = GetFirstTable(); + SetFirstTable(CompilationCacheTable::PutRegExp(table, source, flags, data)); } @@ -398,36 +309,28 @@ void CompilationCache::Remove(Handle<SharedFunctionInfo> function_info) { } -Handle<SharedFunctionInfo> CompilationCache::LookupScript( +MaybeHandle<SharedFunctionInfo> CompilationCache::LookupScript( Handle<String> source, Handle<Object> name, int line_offset, int column_offset, bool is_shared_cross_origin, Handle<Context> context) { - if (!IsEnabled()) { - return Handle<SharedFunctionInfo>::null(); - } + if (!IsEnabled()) return MaybeHandle<SharedFunctionInfo>(); - return script_.Lookup(source, - name, - line_offset, - column_offset, - is_shared_cross_origin, - context); + return script_.Lookup(source, name, line_offset, column_offset, + is_shared_cross_origin, context); } -Handle<SharedFunctionInfo> CompilationCache::LookupEval( +MaybeHandle<SharedFunctionInfo> CompilationCache::LookupEval( Handle<String> source, Handle<Context> context, StrictMode strict_mode, int scope_position) { - if (!IsEnabled()) { - return Handle<SharedFunctionInfo>::null(); - } + if (!IsEnabled()) return MaybeHandle<SharedFunctionInfo>(); - Handle<SharedFunctionInfo> result; + MaybeHandle<SharedFunctionInfo> result; if (context->IsNativeContext()) { result = eval_global_.Lookup( source, context, strict_mode, scope_position); @@ -440,11 +343,9 @@ Handle<SharedFunctionInfo> CompilationCache::LookupEval( } -Handle<FixedArray> CompilationCache::LookupRegExp(Handle<String> source, +MaybeHandle<FixedArray> CompilationCache::LookupRegExp(Handle<String> source, JSRegExp::Flags flags) { - if (!IsEnabled()) { - return Handle<FixedArray>::null(); - } + if (!IsEnabled()) return MaybeHandle<FixedArray>(); return reg_exp_.Lookup(source, flags); } diff --git 
a/deps/v8/src/compilation-cache.h b/deps/v8/src/compilation-cache.h index b31de3111..baa53fb45 100644 --- a/deps/v8/src/compilation-cache.h +++ b/deps/v8/src/compilation-cache.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_COMPILATION_CACHE_H_ #define V8_COMPILATION_CACHE_H_ @@ -106,17 +83,6 @@ class CompilationCacheScript : public CompilationSubCache { Handle<SharedFunctionInfo> function_info); private: - MUST_USE_RESULT MaybeObject* TryTablePut( - Handle<String> source, - Handle<Context> context, - Handle<SharedFunctionInfo> function_info); - - // Note: Returns a new hash table if operation results in expansion. - Handle<CompilationCacheTable> TablePut( - Handle<String> source, - Handle<Context> context, - Handle<SharedFunctionInfo> function_info); - bool HasOrigin(Handle<SharedFunctionInfo> function_info, Handle<Object> name, int line_offset, @@ -147,10 +113,10 @@ class CompilationCacheEval: public CompilationSubCache { CompilationCacheEval(Isolate* isolate, int generations) : CompilationSubCache(isolate, generations) { } - Handle<SharedFunctionInfo> Lookup(Handle<String> source, - Handle<Context> context, - StrictMode strict_mode, - int scope_position); + MaybeHandle<SharedFunctionInfo> Lookup(Handle<String> source, + Handle<Context> context, + StrictMode strict_mode, + int scope_position); void Put(Handle<String> source, Handle<Context> context, @@ -158,19 +124,6 @@ class CompilationCacheEval: public CompilationSubCache { int scope_position); private: - MUST_USE_RESULT MaybeObject* TryTablePut( - Handle<String> source, - Handle<Context> context, - Handle<SharedFunctionInfo> function_info, - int scope_position); - - // Note: Returns a new hash table if operation results in expansion. 
- Handle<CompilationCacheTable> TablePut( - Handle<String> source, - Handle<Context> context, - Handle<SharedFunctionInfo> function_info, - int scope_position); - DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationCacheEval); }; @@ -181,21 +134,12 @@ class CompilationCacheRegExp: public CompilationSubCache { CompilationCacheRegExp(Isolate* isolate, int generations) : CompilationSubCache(isolate, generations) { } - Handle<FixedArray> Lookup(Handle<String> source, JSRegExp::Flags flags); + MaybeHandle<FixedArray> Lookup(Handle<String> source, JSRegExp::Flags flags); void Put(Handle<String> source, JSRegExp::Flags flags, Handle<FixedArray> data); private: - MUST_USE_RESULT MaybeObject* TryTablePut(Handle<String> source, - JSRegExp::Flags flags, - Handle<FixedArray> data); - - // Note: Returns a new hash table if operation results in expansion. - Handle<CompilationCacheTable> TablePut(Handle<String> source, - JSRegExp::Flags flags, - Handle<FixedArray> data); - DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationCacheRegExp); }; @@ -209,25 +153,21 @@ class CompilationCache { // Finds the script shared function info for a source // string. Returns an empty handle if the cache doesn't contain a // script for the given source string with the right origin. - Handle<SharedFunctionInfo> LookupScript(Handle<String> source, - Handle<Object> name, - int line_offset, - int column_offset, - bool is_shared_cross_origin, - Handle<Context> context); + MaybeHandle<SharedFunctionInfo> LookupScript( + Handle<String> source, Handle<Object> name, int line_offset, + int column_offset, bool is_shared_cross_origin, Handle<Context> context); // Finds the shared function info for a source string for eval in a // given context. Returns an empty handle if the cache doesn't // contain a script for the given source string. - Handle<SharedFunctionInfo> LookupEval(Handle<String> source, - Handle<Context> context, - StrictMode strict_mode, - int scope_position); + MaybeHandle<SharedFunctionInfo> LookupEval( + Handle<String> source, Handle<Context> context, StrictMode strict_mode, + int scope_position); // Returns the regexp data associated with the given regexp if it // is in cache, otherwise an empty handle. - Handle<FixedArray> LookupRegExp(Handle<String> source, - JSRegExp::Flags flags); + MaybeHandle<FixedArray> LookupRegExp( + Handle<String> source, JSRegExp::Flags flags); // Associate the (source, kind) pair to the shared function // info. This may overwrite an existing mapping. diff --git a/deps/v8/src/compiler-intrinsics.h b/deps/v8/src/compiler-intrinsics.h index b73e8ac75..f31895e2d 100644 --- a/deps/v8/src/compiler-intrinsics.h +++ b/deps/v8/src/compiler-intrinsics.h @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
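Since the cache lookups declared above now return MaybeHandle instead of a possibly-null Handle, callers distinguish a hit from a miss with ToHandle() rather than is_null(). A minimal caller-side sketch (illustrative only; variable names are placeholders):

    MaybeHandle<SharedFunctionInfo> maybe_info =
        isolate->compilation_cache()->LookupEval(source, context, strict_mode,
                                                 scope_position);
    Handle<SharedFunctionInfo> info;
    if (maybe_info.ToHandle(&info)) {
      // Cache hit: |info| is a valid handle.
    } else {
      // Cache miss: compile the source and Put() the result back into the cache.
    }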
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_COMPILER_INTRINSICS_H_ #define V8_COMPILER_INTRINSICS_H_ diff --git a/deps/v8/src/compiler.cc b/deps/v8/src/compiler.cc index 4b539897b..7b9f705bc 100644 --- a/deps/v8/src/compiler.cc +++ b/deps/v8/src/compiler.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -141,6 +118,13 @@ void CompilationInfo::Initialize(Isolate* isolate, SetStrictMode(shared_info_->strict_mode()); } set_bailout_reason(kUnknown); + + if (!shared_info().is_null() && shared_info()->is_compiled()) { + // We should initialize the CompilationInfo feedback vector from the + // passed in shared info, rather than creating a new one. 
+ feedback_vector_ = Handle<FixedArray>(shared_info()->feedback_vector(), + isolate); + } } @@ -249,7 +233,13 @@ bool CompilationInfo::ShouldSelfOptimize() { void CompilationInfo::PrepareForCompilation(Scope* scope) { ASSERT(scope_ == NULL); scope_ = scope; - function()->ProcessFeedbackSlots(isolate_); + + int length = function()->slot_count(); + if (feedback_vector_.is_null()) { + // Allocate the feedback vector too. + feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(length); + } + ASSERT(feedback_vector_->length() == length); } @@ -298,13 +288,9 @@ class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder { // the full compiler need not be be used if a debugger is attached, but only if // break points has actually been set. static bool IsDebuggerActive(Isolate* isolate) { -#ifdef ENABLE_DEBUGGER_SUPPORT return isolate->use_crankshaft() ? isolate->debug()->has_break_points() : isolate->debugger()->IsDebuggerActive(); -#else - return false; -#endif } @@ -354,6 +340,10 @@ OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() { return AbortAndDisableOptimization(kTooManyParametersLocals); } + if (scope->HasIllegalRedeclaration()) { + return AbortAndDisableOptimization(kFunctionWithIllegalRedeclaration); + } + // Take --hydrogen-filter into account. if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) { return AbortOptimization(kHydrogenFilter); @@ -541,7 +531,7 @@ void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared, // TODO(yangguo): check whether those heuristics are still up-to-date. // We do not shrink objects that go into a snapshot (yet), so we adjust // the estimate conservatively. - if (Serializer::enabled()) { + if (Serializer::enabled(shared->GetIsolate())) { estimate += 2; } else if (FLAG_clever_optimizations) { // Inobject slack tracking will reclaim redundant inobject space later, @@ -571,6 +561,8 @@ static void UpdateSharedFunctionInfo(CompilationInfo* info) { shared->ReplaceCode(*code); if (shared->optimization_disabled()) code->set_optimizable(false); + shared->set_feedback_vector(*info->feedback_vector()); + // Set the expected number of properties for instances. 
FunctionLiteral* lit = info->function(); int expected = lit->expected_property_count(); @@ -633,13 +625,14 @@ static bool CompileUnoptimizedCode(CompilationInfo* info) { } -static Handle<Code> GetUnoptimizedCodeCommon(CompilationInfo* info) { +MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon( + CompilationInfo* info) { VMState<COMPILER> state(info->isolate()); PostponeInterruptsScope postpone(info->isolate()); - if (!Parser::Parse(info)) return Handle<Code>::null(); + if (!Parser::Parse(info)) return MaybeHandle<Code>(); info->SetStrictMode(info->function()->strict_mode()); - if (!CompileUnoptimizedCode(info)) return Handle<Code>::null(); + if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>(); Compiler::RecordFunctionCompilation( Logger::LAZY_COMPILE_TAG, info, info->shared_info()); UpdateSharedFunctionInfo(info); @@ -648,7 +641,7 @@ static Handle<Code> GetUnoptimizedCodeCommon(CompilationInfo* info) { } -Handle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) { +MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) { ASSERT(!function->GetIsolate()->has_pending_exception()); ASSERT(!function->is_compiled()); if (function->shared()->is_compiled()) { @@ -656,39 +649,43 @@ Handle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) { } CompilationInfoWithZone info(function); - Handle<Code> result = GetUnoptimizedCodeCommon(&info); - ASSERT_EQ(result.is_null(), info.isolate()->has_pending_exception()); + Handle<Code> result; + ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result, + GetUnoptimizedCodeCommon(&info), + Code); if (FLAG_always_opt && - !result.is_null() && info.isolate()->use_crankshaft() && !info.shared_info()->optimization_disabled() && !info.isolate()->DebuggerHasBreakPoints()) { - Handle<Code> opt_code = Compiler::GetOptimizedCode( - function, result, Compiler::NOT_CONCURRENT); - if (!opt_code.is_null()) result = opt_code; + Handle<Code> opt_code; + if (Compiler::GetOptimizedCode( + function, result, + Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) { + result = opt_code; + } } return result; } -Handle<Code> Compiler::GetUnoptimizedCode(Handle<SharedFunctionInfo> shared) { +MaybeHandle<Code> Compiler::GetUnoptimizedCode( + Handle<SharedFunctionInfo> shared) { ASSERT(!shared->GetIsolate()->has_pending_exception()); ASSERT(!shared->is_compiled()); CompilationInfoWithZone info(shared); - Handle<Code> result = GetUnoptimizedCodeCommon(&info); - ASSERT_EQ(result.is_null(), info.isolate()->has_pending_exception()); - return result; + return GetUnoptimizedCodeCommon(&info); } bool Compiler::EnsureCompiled(Handle<JSFunction> function, ClearExceptionFlag flag) { if (function->is_compiled()) return true; - Handle<Code> code = Compiler::GetUnoptimizedCode(function); - if (code.is_null()) { + MaybeHandle<Code> maybe_code = Compiler::GetUnoptimizedCode(function); + Handle<Code> code; + if (!maybe_code.ToHandle(&code)) { if (flag == CLEAR_EXCEPTION) { function->GetIsolate()->clear_pending_exception(); } @@ -709,7 +706,7 @@ bool Compiler::EnsureCompiled(Handle<JSFunction> function, // full code without debug break slots to full code with debug break slots // depends on the generated code is otherwise exactly the same. // If compilation fails, just keep the existing code. 
-Handle<Code> Compiler::GetCodeForDebugging(Handle<JSFunction> function) { +MaybeHandle<Code> Compiler::GetCodeForDebugging(Handle<JSFunction> function) { CompilationInfoWithZone info(function); Isolate* isolate = info.isolate(); VMState<COMPILER> state(isolate); @@ -725,18 +722,18 @@ Handle<Code> Compiler::GetCodeForDebugging(Handle<JSFunction> function) { } else { info.MarkNonOptimizable(); } - Handle<Code> new_code = GetUnoptimizedCodeCommon(&info); - if (new_code.is_null()) { + MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info); + Handle<Code> new_code; + if (!maybe_new_code.ToHandle(&new_code)) { isolate->clear_pending_exception(); } else { ASSERT_EQ(old_code->is_compiled_optimizable(), new_code->is_compiled_optimizable()); } - return new_code; + return maybe_new_code; } -#ifdef ENABLE_DEBUGGER_SUPPORT void Compiler::CompileForLiveEdit(Handle<Script> script) { // TODO(635): support extensions. CompilationInfoWithZone info(script); @@ -756,7 +753,6 @@ void Compiler::CompileForLiveEdit(Handle<Script> script) { } tracker.RecordRootFunctionInfo(info.code()); } -#endif static bool DebuggerWantsEagerCompilation(CompilationInfo* info, @@ -776,9 +772,7 @@ static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) { FixedArray* array = isolate->native_context()->embedder_data(); script->set_context_data(array->get(0)); -#ifdef ENABLE_DEBUGGER_SUPPORT isolate->debugger()->OnBeforeCompile(script); -#endif ASSERT(info->is_eval() || info->is_global()); @@ -827,7 +821,8 @@ static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) { lit->materialized_literal_count(), lit->is_generator(), info->code(), - ScopeInfo::Create(info->scope(), info->zone())); + ScopeInfo::Create(info->scope(), info->zone()), + info->feedback_vector()); ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position()); SetFunctionInfo(result, lit, true, script); @@ -854,29 +849,30 @@ static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) { live_edit_tracker.RecordFunctionInfo(result, lit, info->zone()); } -#ifdef ENABLE_DEBUGGER_SUPPORT isolate->debugger()->OnAfterCompile(script, Debugger::NO_AFTER_COMPILE_FLAGS); -#endif return result; } -Handle<JSFunction> Compiler::GetFunctionFromEval(Handle<String> source, - Handle<Context> context, - StrictMode strict_mode, - ParseRestriction restriction, - int scope_position) { +MaybeHandle<JSFunction> Compiler::GetFunctionFromEval( + Handle<String> source, + Handle<Context> context, + StrictMode strict_mode, + ParseRestriction restriction, + int scope_position) { Isolate* isolate = source->GetIsolate(); int source_length = source->length(); isolate->counters()->total_eval_size()->Increment(source_length); isolate->counters()->total_compile_size()->Increment(source_length); CompilationCache* compilation_cache = isolate->compilation_cache(); - Handle<SharedFunctionInfo> shared_info = compilation_cache->LookupEval( - source, context, strict_mode, scope_position); + MaybeHandle<SharedFunctionInfo> maybe_shared_info = + compilation_cache->LookupEval(source, context, strict_mode, + scope_position); + Handle<SharedFunctionInfo> shared_info; - if (shared_info.is_null()) { + if (!maybe_shared_info.ToHandle(&shared_info)) { Handle<Script> script = isolate->factory()->NewScript(source); CompilationInfoWithZone info(script); info.MarkAsEval(); @@ -885,14 +881,12 @@ Handle<JSFunction> Compiler::GetFunctionFromEval(Handle<String> source, info.SetParseRestriction(restriction); info.SetContext(context); -#if ENABLE_DEBUGGER_SUPPORT 
Debug::RecordEvalCaller(script); -#endif // ENABLE_DEBUGGER_SUPPORT shared_info = CompileToplevel(&info); if (shared_info.is_null()) { - return Handle<JSFunction>::null(); + return MaybeHandle<JSFunction>(); } else { // Explicitly disable optimization for eval code. We're not yet prepared // to handle eval-code in the optimizing compiler. @@ -922,7 +916,7 @@ Handle<SharedFunctionInfo> Compiler::CompileScript( bool is_shared_cross_origin, Handle<Context> context, v8::Extension* extension, - ScriptDataImpl** cached_data, + ScriptData** cached_data, CachedDataMode cached_data_mode, NativesFlag natives) { if (cached_data_mode == NO_CACHED_DATA) { @@ -941,25 +935,16 @@ Handle<SharedFunctionInfo> Compiler::CompileScript( CompilationCache* compilation_cache = isolate->compilation_cache(); // Do a lookup in the compilation cache but not for extensions. + MaybeHandle<SharedFunctionInfo> maybe_result; Handle<SharedFunctionInfo> result; if (extension == NULL) { - result = compilation_cache->LookupScript(source, - script_name, - line_offset, - column_offset, - is_shared_cross_origin, - context); + maybe_result = compilation_cache->LookupScript( + source, script_name, line_offset, column_offset, + is_shared_cross_origin, context); } - if (result.is_null()) { - // No cache entry found. Do pre-parsing, if it makes sense, and compile - // the script. - // Building preparse data that is only used immediately after is only a - // saving if we might skip building the AST for lazily compiled functions. - // I.e., preparse data isn't relevant when the lazy flag is off, and - // for small sources, odds are that there aren't many functions - // that would be compiled lazily anyway, so we skip the preparse step - // in that case too. + if (!maybe_result.ToHandle(&result)) { + // No cache entry found. Compile the script. // Create a script object describing the script to be compiled. 
Handle<Script> script = isolate->factory()->NewScript(source); @@ -984,11 +969,10 @@ Handle<SharedFunctionInfo> Compiler::CompileScript( if (extension == NULL && !result.is_null() && !result->dont_cache()) { compilation_cache->PutScript(source, context, result); } + if (result.is_null()) isolate->ReportPendingMessages(); } else if (result->ic_age() != isolate->heap()->global_ic_age()) { result->ResetForNewContext(isolate->heap()->global_ic_age()); } - - if (result.is_null()) isolate->ReportPendingMessages(); return result; } @@ -1036,7 +1020,8 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal, literal->materialized_literal_count(), literal->is_generator(), info.code(), - scope_info); + scope_info, + info.feedback_vector()); SetFunctionInfo(result, literal, false, script); RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result); result->set_allows_lazy_compilation(allow_lazy); @@ -1051,8 +1036,9 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal, } -static Handle<Code> GetCodeFromOptimizedCodeMap(Handle<JSFunction> function, - BailoutId osr_ast_id) { +MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap( + Handle<JSFunction> function, + BailoutId osr_ast_id) { if (FLAG_cache_optimized_code) { Handle<SharedFunctionInfo> shared(function->shared()); DisallowHeapAllocation no_gc; @@ -1072,7 +1058,7 @@ static Handle<Code> GetCodeFromOptimizedCodeMap(Handle<JSFunction> function, return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index)); } } - return Handle<Code>::null(); + return MaybeHandle<Code>(); } @@ -1159,12 +1145,15 @@ static bool GetOptimizedCodeLater(CompilationInfo* info) { } -Handle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function, - Handle<Code> current_code, - ConcurrencyMode mode, - BailoutId osr_ast_id) { - Handle<Code> cached_code = GetCodeFromOptimizedCodeMap(function, osr_ast_id); - if (!cached_code.is_null()) return cached_code; +MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function, + Handle<Code> current_code, + ConcurrencyMode mode, + BailoutId osr_ast_id) { + Handle<Code> cached_code; + if (GetCodeFromOptimizedCodeMap( + function, osr_ast_id).ToHandle(&cached_code)) { + return cached_code; + } SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(function)); Isolate* isolate = info->isolate(); @@ -1197,7 +1186,7 @@ Handle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function, } if (isolate->has_pending_exception()) isolate->clear_pending_exception(); - return Handle<Code>::null(); + return MaybeHandle<Code>(); } @@ -1261,12 +1250,13 @@ void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag, info->isolate()->cpu_profiler()->is_profiling()) { Handle<Script> script = info->script(); Handle<Code> code = info->code(); - if (code.is_identical_to(info->isolate()->builtins()->CompileUnoptimized())) + if (code.is_identical_to( + info->isolate()->builtins()->CompileUnoptimized())) { return; - int line_num = GetScriptLineNumber(script, shared->start_position()) + 1; + } + int line_num = Script::GetLineNumber(script, shared->start_position()) + 1; int column_num = - GetScriptColumnNumber(script, shared->start_position()) + 1; - USE(line_num); + Script::GetColumnNumber(script, shared->start_position()) + 1; String* script_name = script->name()->IsString() ? 
String::cast(script->name()) : info->isolate()->heap()->empty_string(); diff --git a/deps/v8/src/compiler.h b/deps/v8/src/compiler.h index 380201688..24a8a9f5d 100644 --- a/deps/v8/src/compiler.h +++ b/deps/v8/src/compiler.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_COMPILER_H_ #define V8_COMPILER_H_ @@ -35,7 +12,7 @@ namespace v8 { namespace internal { -class ScriptDataImpl; +class ScriptData; class HydrogenCodeStub; // ParseRestriction is used to restrict the set of valid statements in a @@ -83,7 +60,7 @@ class CompilationInfo { Handle<Script> script() const { return script_; } HydrogenCodeStub* code_stub() const {return code_stub_; } v8::Extension* extension() const { return extension_; } - ScriptDataImpl** cached_data() const { return cached_data_; } + ScriptData** cached_data() const { return cached_data_; } CachedDataMode cached_data_mode() const { return cached_data_mode_; } @@ -178,18 +155,20 @@ class CompilationInfo { ASSERT(function_ == NULL); function_ = literal; } - // When the scope is applied, we may have deferred work to do on the function. void PrepareForCompilation(Scope* scope); void SetGlobalScope(Scope* global_scope) { ASSERT(global_scope_ == NULL); global_scope_ = global_scope; } + Handle<FixedArray> feedback_vector() const { + return feedback_vector_; + } void SetCode(Handle<Code> code) { code_ = code; } void SetExtension(v8::Extension* extension) { ASSERT(!is_lazy()); extension_ = extension; } - void SetCachedData(ScriptDataImpl** cached_data, + void SetCachedData(ScriptData** cached_data, CachedDataMode cached_data_mode) { cached_data_mode_ = cached_data_mode; if (cached_data_mode == NO_CACHED_DATA) { @@ -412,13 +391,16 @@ class CompilationInfo { // Fields possibly needed for eager compilation, NULL by default. 
v8::Extension* extension_; - ScriptDataImpl** cached_data_; + ScriptData** cached_data_; CachedDataMode cached_data_mode_; // The context of the caller for eval code, and the global context for a // global script. Will be a null handle otherwise. Handle<Context> context_; + // Used by codegen, ultimately kept rooted by the SharedFunctionInfo. + Handle<FixedArray> feedback_vector_; + // Compilation mode flag and whether deoptimization is allowed. Mode mode_; BailoutId osr_ast_id_; @@ -556,6 +538,8 @@ class OptimizedCompileJob: public ZoneObject { MUST_USE_RESULT Status AbortAndDisableOptimization( BailoutReason reason = kNoReason) { if (reason != kNoReason) info_->set_bailout_reason(reason); + // Reference to shared function info does not change between phases. + AllowDeferredHandleDereference allow_handle_dereference; info_->shared_info()->DisableOptimization(info_->bailout_reason()); return SetLastStatus(BAILED_OUT); } @@ -615,22 +599,24 @@ class OptimizedCompileJob: public ZoneObject { class Compiler : public AllStatic { public: - static Handle<Code> GetUnoptimizedCode(Handle<JSFunction> function); - static Handle<Code> GetUnoptimizedCode(Handle<SharedFunctionInfo> shared); + MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode( + Handle<JSFunction> function); + MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode( + Handle<SharedFunctionInfo> shared); static bool EnsureCompiled(Handle<JSFunction> function, ClearExceptionFlag flag); - static Handle<Code> GetCodeForDebugging(Handle<JSFunction> function); + MUST_USE_RESULT static MaybeHandle<Code> GetCodeForDebugging( + Handle<JSFunction> function); -#ifdef ENABLE_DEBUGGER_SUPPORT static void CompileForLiveEdit(Handle<Script> script); -#endif // Compile a String source within a context for eval. - static Handle<JSFunction> GetFunctionFromEval(Handle<String> source, - Handle<Context> context, - StrictMode strict_mode, - ParseRestriction restriction, - int scope_position); + MUST_USE_RESULT static MaybeHandle<JSFunction> GetFunctionFromEval( + Handle<String> source, + Handle<Context> context, + StrictMode strict_mode, + ParseRestriction restriction, + int scope_position); // Compile a String source within a context. static Handle<SharedFunctionInfo> CompileScript( @@ -641,7 +627,7 @@ class Compiler : public AllStatic { bool is_shared_cross_origin, Handle<Context> context, v8::Extension* extension, - ScriptDataImpl** cached_data, + ScriptData** cached_data, CachedDataMode cached_data_mode, NativesFlag is_natives_code); @@ -654,7 +640,7 @@ class Compiler : public AllStatic { // Generate and return optimized code or start a concurrent optimization job. // In the latter case, return the InOptimizationQueue builtin. On failure, // return the empty handle. - static Handle<Code> GetOptimizedCode( + MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode( Handle<JSFunction> function, Handle<Code> current_code, ConcurrencyMode mode, diff --git a/deps/v8/src/contexts.cc b/deps/v8/src/contexts.cc index 33d47e9c4..58ae49a9c 100644 --- a/deps/v8/src/contexts.cc +++ b/deps/v8/src/contexts.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -160,7 +137,8 @@ Handle<Object> Context::Lookup(Handle<String> name, } VariableMode mode; InitializationFlag init_flag; - int slot_index = scope_info->ContextSlotIndex(*name, &mode, &init_flag); + int slot_index = + ScopeInfo::ContextSlotIndex(scope_info, name, &mode, &init_flag); ASSERT(slot_index < 0 || slot_index >= MIN_CONTEXT_SLOTS); if (slot_index >= 0) { if (FLAG_trace_contexts) { @@ -231,7 +209,7 @@ Handle<Object> Context::Lookup(Handle<String> name, } else if (context->IsCatchContext()) { // Catch contexts have the variable name in the extension slot. - if (name->Equals(String::cast(context->extension()))) { + if (String::Equals(name, handle(String::cast(context->extension())))) { if (FLAG_trace_contexts) { PrintF("=> found in catch context\n"); } @@ -365,11 +343,11 @@ Object* Context::DeoptimizedCodeListHead() { Handle<Object> Context::ErrorMessageForCodeGenerationFromStrings() { - Handle<Object> result(error_message_for_code_gen_from_strings(), - GetIsolate()); + Isolate* isolate = GetIsolate(); + Handle<Object> result(error_message_for_code_gen_from_strings(), isolate); if (!result->IsUndefined()) return result; - return GetIsolate()->factory()->NewStringFromOneByte(STATIC_ASCII_VECTOR( - "Code generation from strings disallowed for this context")); + return isolate->factory()->NewStringFromStaticAscii( + "Code generation from strings disallowed for this context"); } diff --git a/deps/v8/src/contexts.h b/deps/v8/src/contexts.h index 6ba9b3ed7..f1aa380c7 100644 --- a/deps/v8/src/contexts.h +++ b/deps/v8/src/contexts.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_CONTEXTS_H_ #define V8_CONTEXTS_H_ @@ -134,6 +111,16 @@ enum BindingFlags { V(FLOAT32_ARRAY_FUN_INDEX, JSFunction, float32_array_fun) \ V(FLOAT64_ARRAY_FUN_INDEX, JSFunction, float64_array_fun) \ V(UINT8_CLAMPED_ARRAY_FUN_INDEX, JSFunction, uint8_clamped_array_fun) \ + V(INT8_ARRAY_EXTERNAL_MAP_INDEX, Map, int8_array_external_map) \ + V(UINT8_ARRAY_EXTERNAL_MAP_INDEX, Map, uint8_array_external_map) \ + V(INT16_ARRAY_EXTERNAL_MAP_INDEX, Map, int16_array_external_map) \ + V(UINT16_ARRAY_EXTERNAL_MAP_INDEX, Map, uint16_array_external_map) \ + V(INT32_ARRAY_EXTERNAL_MAP_INDEX, Map, int32_array_external_map) \ + V(UINT32_ARRAY_EXTERNAL_MAP_INDEX, Map, uint32_array_external_map) \ + V(FLOAT32_ARRAY_EXTERNAL_MAP_INDEX, Map, float32_array_external_map) \ + V(FLOAT64_ARRAY_EXTERNAL_MAP_INDEX, Map, float64_array_external_map) \ + V(UINT8_CLAMPED_ARRAY_EXTERNAL_MAP_INDEX, Map, \ + uint8_clamped_array_external_map) \ V(DATA_VIEW_FUN_INDEX, JSFunction, data_view_fun) \ V(SLOPPY_FUNCTION_MAP_INDEX, Map, sloppy_function_map) \ V(STRICT_FUNCTION_MAP_INDEX, Map, strict_function_map) \ @@ -168,7 +155,7 @@ enum BindingFlags { V(ERROR_MESSAGE_FOR_CODE_GEN_FROM_STRINGS_INDEX, Object, \ error_message_for_code_gen_from_strings) \ V(RUN_MICROTASKS_INDEX, JSFunction, run_microtasks) \ - V(ENQUEUE_EXTERNAL_MICROTASK_INDEX, JSFunction, enqueue_external_microtask) \ + V(ENQUEUE_MICROTASK_INDEX, JSFunction, enqueue_microtask) \ V(IS_PROMISE_INDEX, JSFunction, is_promise) \ V(PROMISE_CREATE_INDEX, JSFunction, promise_create) \ V(PROMISE_RESOLVE_INDEX, JSFunction, promise_resolve) \ @@ -187,11 +174,19 @@ enum BindingFlags { observers_begin_perform_splice) \ V(OBSERVERS_END_SPLICE_INDEX, JSFunction, \ observers_end_perform_splice) \ + V(NATIVE_OBJECT_OBSERVE_INDEX, JSFunction, \ + native_object_observe) \ + V(NATIVE_OBJECT_GET_NOTIFIER_INDEX, JSFunction, \ + native_object_get_notifier) \ + V(NATIVE_OBJECT_NOTIFIER_PERFORM_CHANGE, JSFunction, \ + native_object_notifier_perform_change) \ V(SLOPPY_GENERATOR_FUNCTION_MAP_INDEX, Map, sloppy_generator_function_map) \ V(STRICT_GENERATOR_FUNCTION_MAP_INDEX, Map, strict_generator_function_map) \ V(GENERATOR_OBJECT_PROTOTYPE_MAP_INDEX, Map, \ generator_object_prototype_map) \ - V(GENERATOR_RESULT_MAP_INDEX, Map, generator_result_map) + V(ITERATOR_RESULT_MAP_INDEX, Map, iterator_result_map) \ + V(MAP_ITERATOR_MAP_INDEX, Map, map_iterator_map) \ + V(SET_ITERATOR_MAP_INDEX, Map, set_iterator_map) // JSFunctions are pairs 
(context, function code), sometimes also called // closures. A Context object is used to represent function contexts and @@ -309,6 +304,15 @@ class Context: public FixedArray { FLOAT32_ARRAY_FUN_INDEX, FLOAT64_ARRAY_FUN_INDEX, UINT8_CLAMPED_ARRAY_FUN_INDEX, + INT8_ARRAY_EXTERNAL_MAP_INDEX, + UINT8_ARRAY_EXTERNAL_MAP_INDEX, + INT16_ARRAY_EXTERNAL_MAP_INDEX, + UINT16_ARRAY_EXTERNAL_MAP_INDEX, + INT32_ARRAY_EXTERNAL_MAP_INDEX, + UINT32_ARRAY_EXTERNAL_MAP_INDEX, + FLOAT32_ARRAY_EXTERNAL_MAP_INDEX, + FLOAT64_ARRAY_EXTERNAL_MAP_INDEX, + UINT8_CLAMPED_ARRAY_EXTERNAL_MAP_INDEX, DATA_VIEW_FUN_INDEX, MESSAGE_LISTENERS_INDEX, MAKE_MESSAGE_FUN_INDEX, @@ -328,7 +332,7 @@ class Context: public FixedArray { ALLOW_CODE_GEN_FROM_STRINGS_INDEX, ERROR_MESSAGE_FOR_CODE_GEN_FROM_STRINGS_INDEX, RUN_MICROTASKS_INDEX, - ENQUEUE_EXTERNAL_MICROTASK_INDEX, + ENQUEUE_MICROTASK_INDEX, IS_PROMISE_INDEX, PROMISE_CREATE_INDEX, PROMISE_RESOLVE_INDEX, @@ -344,10 +348,15 @@ class Context: public FixedArray { OBSERVERS_ENQUEUE_SPLICE_INDEX, OBSERVERS_BEGIN_SPLICE_INDEX, OBSERVERS_END_SPLICE_INDEX, + NATIVE_OBJECT_OBSERVE_INDEX, + NATIVE_OBJECT_GET_NOTIFIER_INDEX, + NATIVE_OBJECT_NOTIFIER_PERFORM_CHANGE, SLOPPY_GENERATOR_FUNCTION_MAP_INDEX, STRICT_GENERATOR_FUNCTION_MAP_INDEX, GENERATOR_OBJECT_PROTOTYPE_MAP_INDEX, - GENERATOR_RESULT_MAP_INDEX, + ITERATOR_RESULT_MAP_INDEX, + MAP_ITERATOR_MAP_INDEX, + SET_ITERATOR_MAP_INDEX, // Properties from here are treated as weak references by the full GC. // Scavenge treats them as strong references. diff --git a/deps/v8/src/conversions-inl.h b/deps/v8/src/conversions-inl.h index e503eb502..43363f373 100644 --- a/deps/v8/src/conversions-inl.h +++ b/deps/v8/src/conversions-inl.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
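The NATIVE_CONTEXT_FIELDS additions above (typed-array external maps, iterator result map, map/set iterator maps) have to be mirrored by hand in the Context slot enum, because the V(...) list drives accessor generation elsewhere in contexts.h while the index enum is maintained in parallel. A minimal toy sketch of that pattern, not code from this patch (ToyContext, TOY_CONTEXT_FIELDS, and the int slots are illustrative stand-ins):

// Toy illustration of the X-macro pattern: one field list, macro-generated
// typed accessors, and a hand-maintained slot enum kept in step with it.
#define TOY_CONTEXT_FIELDS(V)                              \
  V(ITERATOR_RESULT_MAP_INDEX, int, iterator_result_map)   \
  V(MAP_ITERATOR_MAP_INDEX, int, map_iterator_map)

class ToyContext {
 public:
  // Hand-written indices, mirroring the explicit enum entries in contexts.h.
  enum Index { ITERATOR_RESULT_MAP_INDEX, MAP_ITERATOR_MAP_INDEX, kLength };

  // One getter/setter pair is generated per V(index, type, name) entry.
#define TOY_FIELD_ACCESSORS(index, type, name)             \
  type name() const { return slots_[index]; }              \
  void set_##name(type value) { slots_[index] = value; }
  TOY_CONTEXT_FIELDS(TOY_FIELD_ACCESSORS)
#undef TOY_FIELD_ACCESSORS

 private:
  int slots_[kLength] = {};
};

This is why the patch touches both the macro list and the enum for every new native-context slot: the accessor expansion picks up the new V(...) entry automatically, but the slot index still has to be added by hand in the same order.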
#ifndef V8_CONVERSIONS_INL_H_ #define V8_CONVERSIONS_INL_H_ @@ -75,7 +52,11 @@ inline unsigned int FastD2UI(double x) { if (x < k2Pow52) { x += k2Pow52; uint32_t result; +#ifndef V8_TARGET_BIG_ENDIAN Address mantissa_ptr = reinterpret_cast<Address>(&x); +#else + Address mantissa_ptr = reinterpret_cast<Address>(&x) + kIntSize; +#endif // Copy least significant 32 bits of mantissa. OS::MemCopy(&result, mantissa_ptr, sizeof(result)); return negative ? ~result + 1 : result; diff --git a/deps/v8/src/conversions.cc b/deps/v8/src/conversions.cc index 9c52d41e5..14dbc2b7f 100644 --- a/deps/v8/src/conversions.cc +++ b/deps/v8/src/conversions.cc @@ -1,36 +1,18 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdarg.h> #include <limits.h> #include <cmath> +#include "v8.h" + +#include "assert-scope.h" +#include "conversions.h" #include "conversions-inl.h" #include "dtoa.h" +#include "factory.h" #include "list-inl.h" #include "strtod.h" #include "utils.h" @@ -44,6 +26,47 @@ namespace v8 { namespace internal { +namespace { + +// C++-style iterator adaptor for StringCharacterStream +// (unlike C++ iterators the end-marker has different type). 
+class StringCharacterStreamIterator { + public: + class EndMarker {}; + + explicit StringCharacterStreamIterator(StringCharacterStream* stream); + + uint16_t operator*() const; + void operator++(); + bool operator==(EndMarker const&) const { return end_; } + bool operator!=(EndMarker const& m) const { return !end_; } + + private: + StringCharacterStream* const stream_; + uint16_t current_; + bool end_; +}; + + +StringCharacterStreamIterator::StringCharacterStreamIterator( + StringCharacterStream* stream) : stream_(stream) { + ++(*this); +} + +uint16_t StringCharacterStreamIterator::operator*() const { + return current_; +} + + +void StringCharacterStreamIterator::operator++() { + end_ = !stream_->HasMore(); + if (!end_) { + current_ = stream_->GetNext(); + } +} +} // End anonymous namespace. + + double StringToDouble(UnicodeCache* unicode_cache, const char* str, int flags, double empty_string_val) { // We cast to const uint8_t* here to avoid instantiating the @@ -56,7 +79,7 @@ double StringToDouble(UnicodeCache* unicode_cache, double StringToDouble(UnicodeCache* unicode_cache, - Vector<const char> str, + Vector<const uint8_t> str, int flags, double empty_string_val) { // We cast to const uint8_t* here to avoid instantiating the @@ -78,6 +101,23 @@ double StringToDouble(UnicodeCache* unicode_cache, } +// Converts a string into an integer. +double StringToInt(UnicodeCache* unicode_cache, + Vector<const uint8_t> vector, + int radix) { + return InternalStringToInt( + unicode_cache, vector.start(), vector.start() + vector.length(), radix); +} + + +double StringToInt(UnicodeCache* unicode_cache, + Vector<const uc16> vector, + int radix) { + return InternalStringToInt( + unicode_cache, vector.start(), vector.start() + vector.length(), radix); +} + + const char* DoubleToCString(double v, Vector<char> buffer) { switch (fpclassify(v)) { case FP_NAN: return "NaN"; @@ -256,7 +296,6 @@ static char* CreateExponentialRepresentation(char* decimal_rep, } - char* DoubleToExponentialCString(double value, int f) { const int kMaxDigitsAfterPoint = 20; // f might be -1 to signal that f was undefined in JavaScript. @@ -443,4 +482,22 @@ char* DoubleToRadixCString(double value, int radix) { return builder.Finalize(); } + +double StringToDouble(UnicodeCache* unicode_cache, + String* string, + int flags, + double empty_string_val) { + DisallowHeapAllocation no_gc; + String::FlatContent flat = string->GetFlatContent(); + // ECMA-262 section 15.1.2.3, empty string is NaN + if (flat.IsAscii()) { + return StringToDouble( + unicode_cache, flat.ToOneByteVector(), flags, empty_string_val); + } else { + return StringToDouble( + unicode_cache, flat.ToUC16Vector(), flags, empty_string_val); + } +} + + } } // namespace v8::internal diff --git a/deps/v8/src/conversions.h b/deps/v8/src/conversions.h index f850f581f..d6c99aa9f 100644 --- a/deps/v8/src/conversions.h +++ b/deps/v8/src/conversions.h @@ -1,33 +1,15 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_CONVERSIONS_H_ #define V8_CONVERSIONS_H_ +#include <limits> + +#include "checks.h" +#include "handles.h" +#include "objects.h" #include "utils.h" namespace v8 { @@ -122,7 +104,7 @@ enum ConversionFlags { // Converts a string into a double value according to ECMA-262 9.3.1 double StringToDouble(UnicodeCache* unicode_cache, - Vector<const char> str, + Vector<const uint8_t> str, int flags, double empty_string_val = 0); double StringToDouble(UnicodeCache* unicode_cache, @@ -135,6 +117,16 @@ double StringToDouble(UnicodeCache* unicode_cache, int flags, double empty_string_val = 0); +// Converts a string into an integer. +double StringToInt(UnicodeCache* unicode_cache, + Vector<const uint8_t> vector, + int radix); + + +double StringToInt(UnicodeCache* unicode_cache, + Vector<const uc16> vector, + int radix); + const int kDoubleToCStringMinBufferSize = 100; // Converts a double to a string value according to ECMA-262 9.8.1. @@ -153,6 +145,77 @@ char* DoubleToExponentialCString(double value, int f); char* DoubleToPrecisionCString(double value, int f); char* DoubleToRadixCString(double value, int radix); + +static inline bool IsMinusZero(double value) { + static const DoubleRepresentation minus_zero(-0.0); + return DoubleRepresentation(value) == minus_zero; +} + + +// Integer32 is an integer that can be represented as a signed 32-bit +// integer. It has to be in the range [-2^31, 2^31 - 1]. +// We also have to check for negative 0 as it is not an Integer32. +static inline bool IsInt32Double(double value) { + return !IsMinusZero(value) && + value >= kMinInt && + value <= kMaxInt && + value == FastI2D(FastD2I(value)); +} + + +// Convert from Number object to C integer. 
+inline int32_t NumberToInt32(Object* number) { + if (number->IsSmi()) return Smi::cast(number)->value(); + return DoubleToInt32(number->Number()); +} + + +inline uint32_t NumberToUint32(Object* number) { + if (number->IsSmi()) return Smi::cast(number)->value(); + return DoubleToUint32(number->Number()); +} + + +double StringToDouble(UnicodeCache* unicode_cache, + String* string, + int flags, + double empty_string_val = 0.0); + + +inline bool TryNumberToSize(Isolate* isolate, + Object* number, size_t* result) { + SealHandleScope shs(isolate); + if (number->IsSmi()) { + int value = Smi::cast(number)->value(); + ASSERT(static_cast<unsigned>(Smi::kMaxValue) + <= std::numeric_limits<size_t>::max()); + if (value >= 0) { + *result = static_cast<size_t>(value); + return true; + } + return false; + } else { + ASSERT(number->IsHeapNumber()); + double value = HeapNumber::cast(number)->value(); + if (value >= 0 && + value <= std::numeric_limits<size_t>::max()) { + *result = static_cast<size_t>(value); + return true; + } else { + return false; + } + } +} + +// Converts a number into size_t. +inline size_t NumberToSize(Isolate* isolate, + Object* number) { + size_t result = 0; + bool is_valid = TryNumberToSize(isolate, number, &result); + CHECK(is_valid); + return result; +} + } } // namespace v8::internal #endif // V8_CONVERSIONS_H_ diff --git a/deps/v8/src/counters.cc b/deps/v8/src/counters.cc index e7fab1c3d..eb1b4c27f 100644 --- a/deps/v8/src/counters.cc +++ b/deps/v8/src/counters.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
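The new TryNumberToSize/NumberToSize pair above separates checked from unchecked conversion: TryNumberToSize returns false for negative values or values that do not fit in size_t, while NumberToSize simply wraps it in a CHECK for callers that have already validated the number. A minimal caller sketch, assuming an Isolate* and an Object* are in scope (the GetByteLength helper and its parameter names are hypothetical, not part of this patch):

// Hypothetical caller: convert a script-supplied length to size_t without
// risking the CHECK inside NumberToSize.
static bool GetByteLength(Isolate* isolate, Object* length_obj,
                          size_t* out_length) {
  size_t length = 0;
  if (!TryNumberToSize(isolate, length_obj, &length)) {
    return false;  // negative, or too large to represent as size_t
  }
  *out_length = length;
  return true;
}

NumberToSize(isolate, number) stays appropriate only where the range has already been checked, since it CHECK-fails on invalid input rather than reporting an error.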
#include "v8.h" @@ -76,4 +53,74 @@ void HistogramTimer::Stop() { isolate()->event_logger()(name(), Logger::END); } + +Counters::Counters(Isolate* isolate) { +#define HT(name, caption) \ + name##_ = HistogramTimer(#caption, 0, 10000, 50, isolate); + HISTOGRAM_TIMER_LIST(HT) +#undef HT + +#define HP(name, caption) \ + name##_ = Histogram(#caption, 0, 101, 100, isolate); + HISTOGRAM_PERCENTAGE_LIST(HP) +#undef HP + +#define HM(name, caption) \ + name##_ = Histogram(#caption, 1000, 500000, 50, isolate); + HISTOGRAM_MEMORY_LIST(HM) +#undef HM + +#define SC(name, caption) \ + name##_ = StatsCounter(isolate, "c:" #caption); + + STATS_COUNTER_LIST_1(SC) + STATS_COUNTER_LIST_2(SC) +#undef SC + +#define SC(name) \ + count_of_##name##_ = StatsCounter(isolate, "c:" "V8.CountOf_" #name); \ + size_of_##name##_ = StatsCounter(isolate, "c:" "V8.SizeOf_" #name); + INSTANCE_TYPE_LIST(SC) +#undef SC + +#define SC(name) \ + count_of_CODE_TYPE_##name##_ = \ + StatsCounter(isolate, "c:" "V8.CountOf_CODE_TYPE-" #name); \ + size_of_CODE_TYPE_##name##_ = \ + StatsCounter(isolate, "c:" "V8.SizeOf_CODE_TYPE-" #name); + CODE_KIND_LIST(SC) +#undef SC + +#define SC(name) \ + count_of_FIXED_ARRAY_##name##_ = \ + StatsCounter(isolate, "c:" "V8.CountOf_FIXED_ARRAY-" #name); \ + size_of_FIXED_ARRAY_##name##_ = \ + StatsCounter(isolate, "c:" "V8.SizeOf_FIXED_ARRAY-" #name); + FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC) +#undef SC + +#define SC(name) \ + count_of_CODE_AGE_##name##_ = \ + StatsCounter(isolate, "c:" "V8.CountOf_CODE_AGE-" #name); \ + size_of_CODE_AGE_##name##_ = \ + StatsCounter(isolate, "c:" "V8.SizeOf_CODE_AGE-" #name); + CODE_AGE_LIST_COMPLETE(SC) +#undef SC +} + + +void Counters::ResetHistograms() { +#define HT(name, caption) name##_.Reset(); + HISTOGRAM_TIMER_LIST(HT) +#undef HT + +#define HP(name, caption) name##_.Reset(); + HISTOGRAM_PERCENTAGE_LIST(HP) +#undef HP + +#define HM(name, caption) name##_.Reset(); + HISTOGRAM_MEMORY_LIST(HM) +#undef HM +} + } } // namespace v8::internal diff --git a/deps/v8/src/counters.h b/deps/v8/src/counters.h index 821c25f8c..18a9aef29 100644 --- a/deps/v8/src/counters.h +++ b/deps/v8/src/counters.h @@ -1,35 +1,15 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_COUNTERS_H_ #define V8_COUNTERS_H_ #include "../include/v8.h" #include "allocation.h" +#include "objects.h" +#include "platform/elapsed-timer.h" +#include "v8globals.h" namespace v8 { namespace internal { @@ -307,6 +287,400 @@ class HistogramTimerScope BASE_EMBEDDED { }; +#define HISTOGRAM_TIMER_LIST(HT) \ + /* Garbage collection timers. */ \ + HT(gc_compactor, V8.GCCompactor) \ + HT(gc_scavenger, V8.GCScavenger) \ + HT(gc_context, V8.GCContext) /* GC context cleanup time */ \ + /* Parsing timers. */ \ + HT(parse, V8.Parse) \ + HT(parse_lazy, V8.ParseLazy) \ + HT(pre_parse, V8.PreParse) \ + /* Total compilation times. */ \ + HT(compile, V8.Compile) \ + HT(compile_eval, V8.CompileEval) \ + HT(compile_lazy, V8.CompileLazy) + +#define HISTOGRAM_PERCENTAGE_LIST(HP) \ + /* Heap fragmentation. */ \ + HP(external_fragmentation_total, \ + V8.MemoryExternalFragmentationTotal) \ + HP(external_fragmentation_old_pointer_space, \ + V8.MemoryExternalFragmentationOldPointerSpace) \ + HP(external_fragmentation_old_data_space, \ + V8.MemoryExternalFragmentationOldDataSpace) \ + HP(external_fragmentation_code_space, \ + V8.MemoryExternalFragmentationCodeSpace) \ + HP(external_fragmentation_map_space, \ + V8.MemoryExternalFragmentationMapSpace) \ + HP(external_fragmentation_cell_space, \ + V8.MemoryExternalFragmentationCellSpace) \ + HP(external_fragmentation_property_cell_space, \ + V8.MemoryExternalFragmentationPropertyCellSpace) \ + HP(external_fragmentation_lo_space, \ + V8.MemoryExternalFragmentationLoSpace) \ + /* Percentages of heap committed to each space. */ \ + HP(heap_fraction_new_space, \ + V8.MemoryHeapFractionNewSpace) \ + HP(heap_fraction_old_pointer_space, \ + V8.MemoryHeapFractionOldPointerSpace) \ + HP(heap_fraction_old_data_space, \ + V8.MemoryHeapFractionOldDataSpace) \ + HP(heap_fraction_code_space, \ + V8.MemoryHeapFractionCodeSpace) \ + HP(heap_fraction_map_space, \ + V8.MemoryHeapFractionMapSpace) \ + HP(heap_fraction_cell_space, \ + V8.MemoryHeapFractionCellSpace) \ + HP(heap_fraction_property_cell_space, \ + V8.MemoryHeapFractionPropertyCellSpace) \ + HP(heap_fraction_lo_space, \ + V8.MemoryHeapFractionLoSpace) \ + /* Percentage of crankshafted codegen. 
*/ \ + HP(codegen_fraction_crankshaft, \ + V8.CodegenFractionCrankshaft) \ + + +#define HISTOGRAM_MEMORY_LIST(HM) \ + HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted) \ + HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \ + HM(heap_sample_map_space_committed, \ + V8.MemoryHeapSampleMapSpaceCommitted) \ + HM(heap_sample_cell_space_committed, \ + V8.MemoryHeapSampleCellSpaceCommitted) \ + HM(heap_sample_property_cell_space_committed, \ + V8.MemoryHeapSamplePropertyCellSpaceCommitted) \ + HM(heap_sample_code_space_committed, \ + V8.MemoryHeapSampleCodeSpaceCommitted) \ + HM(heap_sample_maximum_committed, \ + V8.MemoryHeapSampleMaximumCommitted) \ + + +// WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC +// Intellisense to crash. It was broken into two macros (each of length 40 +// lines) rather than one macro (of length about 80 lines) to work around +// this problem. Please avoid using recursive macros of this length when +// possible. +#define STATS_COUNTER_LIST_1(SC) \ + /* Global Handle Count*/ \ + SC(global_handles, V8.GlobalHandles) \ + /* OS Memory allocated */ \ + SC(memory_allocated, V8.OsMemoryAllocated) \ + SC(normalized_maps, V8.NormalizedMaps) \ + SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \ + SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \ + SC(alive_after_last_gc, V8.AliveAfterLastGC) \ + SC(objs_since_last_young, V8.ObjsSinceLastYoung) \ + SC(objs_since_last_full, V8.ObjsSinceLastFull) \ + SC(string_table_capacity, V8.StringTableCapacity) \ + SC(number_of_symbols, V8.NumberOfSymbols) \ + SC(script_wrappers, V8.ScriptWrappers) \ + SC(call_initialize_stubs, V8.CallInitializeStubs) \ + SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs) \ + SC(call_normal_stubs, V8.CallNormalStubs) \ + SC(call_megamorphic_stubs, V8.CallMegamorphicStubs) \ + SC(arguments_adaptors, V8.ArgumentsAdaptors) \ + SC(compilation_cache_hits, V8.CompilationCacheHits) \ + SC(compilation_cache_misses, V8.CompilationCacheMisses) \ + SC(string_ctor_calls, V8.StringConstructorCalls) \ + SC(string_ctor_conversions, V8.StringConstructorConversions) \ + SC(string_ctor_cached_number, V8.StringConstructorCachedNumber) \ + SC(string_ctor_string_value, V8.StringConstructorStringValue) \ + SC(string_ctor_gc_required, V8.StringConstructorGCRequired) \ + /* Amount of evaled source code. */ \ + SC(total_eval_size, V8.TotalEvalSize) \ + /* Amount of loaded source code. */ \ + SC(total_load_size, V8.TotalLoadSize) \ + /* Amount of parsed source code. */ \ + SC(total_parse_size, V8.TotalParseSize) \ + /* Amount of source code skipped over using preparsing. */ \ + SC(total_preparse_skipped, V8.TotalPreparseSkipped) \ + /* Number of symbol lookups skipped using preparsing */ \ + SC(total_preparse_symbols_skipped, V8.TotalPreparseSymbolSkipped) \ + /* Amount of compiled source code. */ \ + SC(total_compile_size, V8.TotalCompileSize) \ + /* Amount of source code compiled with the full codegen. */ \ + SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize) \ + /* Number of contexts created from scratch. */ \ + SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch) \ + /* Number of contexts created by partial snapshot. */ \ + SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot) \ + /* Number of code objects found from pc. */ \ + SC(pc_to_code, V8.PcToCode) \ + SC(pc_to_code_cached, V8.PcToCodeCached) \ + /* The store-buffer implementation of the write barrier. 
*/ \ + SC(store_buffer_compactions, V8.StoreBufferCompactions) \ + SC(store_buffer_overflows, V8.StoreBufferOverflows) + + +#define STATS_COUNTER_LIST_2(SC) \ + /* Number of code stubs. */ \ + SC(code_stubs, V8.CodeStubs) \ + /* Amount of stub code. */ \ + SC(total_stubs_code_size, V8.TotalStubsCodeSize) \ + /* Amount of (JS) compiled code. */ \ + SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \ + SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \ + SC(gc_compactor_caused_by_promoted_data, \ + V8.GCCompactorCausedByPromotedData) \ + SC(gc_compactor_caused_by_oldspace_exhaustion, \ + V8.GCCompactorCausedByOldspaceExhaustion) \ + SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \ + SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \ + /* How is the generic keyed-load stub used? */ \ + SC(keyed_load_generic_smi, V8.KeyedLoadGenericSmi) \ + SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol) \ + SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache) \ + SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow) \ + SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs) \ + SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow) \ + /* How is the generic keyed-call stub used? */ \ + SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast) \ + SC(keyed_call_generic_smi_dict, V8.KeyedCallGenericSmiDict) \ + SC(keyed_call_generic_lookup_cache, V8.KeyedCallGenericLookupCache) \ + SC(keyed_call_generic_lookup_dict, V8.KeyedCallGenericLookupDict) \ + SC(keyed_call_generic_slow, V8.KeyedCallGenericSlow) \ + SC(keyed_call_generic_slow_load, V8.KeyedCallGenericSlowLoad) \ + SC(named_load_global_stub, V8.NamedLoadGlobalStub) \ + SC(named_store_global_inline, V8.NamedStoreGlobalInline) \ + SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss) \ + SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs) \ + SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow) \ + SC(store_normal_miss, V8.StoreNormalMiss) \ + SC(store_normal_hit, V8.StoreNormalHit) \ + SC(cow_arrays_created_stub, V8.COWArraysCreatedStub) \ + SC(cow_arrays_created_runtime, V8.COWArraysCreatedRuntime) \ + SC(cow_arrays_converted, V8.COWArraysConverted) \ + SC(call_miss, V8.CallMiss) \ + SC(keyed_call_miss, V8.KeyedCallMiss) \ + SC(load_miss, V8.LoadMiss) \ + SC(keyed_load_miss, V8.KeyedLoadMiss) \ + SC(call_const, V8.CallConst) \ + SC(call_const_fast_api, V8.CallConstFastApi) \ + SC(call_const_interceptor, V8.CallConstInterceptor) \ + SC(call_const_interceptor_fast_api, V8.CallConstInterceptorFastApi) \ + SC(call_global_inline, V8.CallGlobalInline) \ + SC(call_global_inline_miss, V8.CallGlobalInlineMiss) \ + SC(constructed_objects, V8.ConstructedObjects) \ + SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \ + SC(negative_lookups, V8.NegativeLookups) \ + SC(negative_lookups_miss, V8.NegativeLookupsMiss) \ + SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \ + SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \ + SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \ + SC(array_function_runtime, V8.ArrayFunctionRuntime) \ + SC(array_function_native, V8.ArrayFunctionNative) \ + SC(for_in, V8.ForIn) \ + SC(enum_cache_hits, V8.EnumCacheHits) \ + SC(enum_cache_misses, V8.EnumCacheMisses) \ + SC(zone_segment_bytes, V8.ZoneSegmentBytes) \ + SC(fast_new_closure_total, V8.FastNewClosureTotal) \ + SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized) \ + 
SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized) \ + SC(string_add_runtime, V8.StringAddRuntime) \ + SC(string_add_native, V8.StringAddNative) \ + SC(string_add_runtime_ext_to_ascii, V8.StringAddRuntimeExtToAscii) \ + SC(sub_string_runtime, V8.SubStringRuntime) \ + SC(sub_string_native, V8.SubStringNative) \ + SC(string_add_make_two_char, V8.StringAddMakeTwoChar) \ + SC(string_compare_native, V8.StringCompareNative) \ + SC(string_compare_runtime, V8.StringCompareRuntime) \ + SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \ + SC(regexp_entry_native, V8.RegExpEntryNative) \ + SC(number_to_string_native, V8.NumberToStringNative) \ + SC(number_to_string_runtime, V8.NumberToStringRuntime) \ + SC(math_acos, V8.MathAcos) \ + SC(math_asin, V8.MathAsin) \ + SC(math_atan, V8.MathAtan) \ + SC(math_atan2, V8.MathAtan2) \ + SC(math_exp, V8.MathExp) \ + SC(math_floor, V8.MathFloor) \ + SC(math_log, V8.MathLog) \ + SC(math_pow, V8.MathPow) \ + SC(math_round, V8.MathRound) \ + SC(math_sqrt, V8.MathSqrt) \ + SC(stack_interrupts, V8.StackInterrupts) \ + SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \ + SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \ + SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \ + SC(soft_deopts_requested, V8.SoftDeoptsRequested) \ + SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \ + SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \ + /* Number of write barriers in generated code. */ \ + SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \ + SC(write_barriers_static, V8.WriteBarriersStatic) \ + SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \ + SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \ + SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \ + SC(old_pointer_space_bytes_available, \ + V8.MemoryOldPointerSpaceBytesAvailable) \ + SC(old_pointer_space_bytes_committed, \ + V8.MemoryOldPointerSpaceBytesCommitted) \ + SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed) \ + SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable) \ + SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted) \ + SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed) \ + SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \ + SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \ + SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \ + SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \ + SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \ + SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \ + SC(cell_space_bytes_available, V8.MemoryCellSpaceBytesAvailable) \ + SC(cell_space_bytes_committed, V8.MemoryCellSpaceBytesCommitted) \ + SC(cell_space_bytes_used, V8.MemoryCellSpaceBytesUsed) \ + SC(property_cell_space_bytes_available, \ + V8.MemoryPropertyCellSpaceBytesAvailable) \ + SC(property_cell_space_bytes_committed, \ + V8.MemoryPropertyCellSpaceBytesCommitted) \ + SC(property_cell_space_bytes_used, \ + V8.MemoryPropertyCellSpaceBytesUsed) \ + SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \ + SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \ + SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed) + + +// This file contains all the v8 counters that are in use. 
+class Counters { + public: +#define HT(name, caption) \ + HistogramTimer* name() { return &name##_; } + HISTOGRAM_TIMER_LIST(HT) +#undef HT + +#define HP(name, caption) \ + Histogram* name() { return &name##_; } + HISTOGRAM_PERCENTAGE_LIST(HP) +#undef HP + +#define HM(name, caption) \ + Histogram* name() { return &name##_; } + HISTOGRAM_MEMORY_LIST(HM) +#undef HM + +#define SC(name, caption) \ + StatsCounter* name() { return &name##_; } + STATS_COUNTER_LIST_1(SC) + STATS_COUNTER_LIST_2(SC) +#undef SC + +#define SC(name) \ + StatsCounter* count_of_##name() { return &count_of_##name##_; } \ + StatsCounter* size_of_##name() { return &size_of_##name##_; } + INSTANCE_TYPE_LIST(SC) +#undef SC + +#define SC(name) \ + StatsCounter* count_of_CODE_TYPE_##name() \ + { return &count_of_CODE_TYPE_##name##_; } \ + StatsCounter* size_of_CODE_TYPE_##name() \ + { return &size_of_CODE_TYPE_##name##_; } + CODE_KIND_LIST(SC) +#undef SC + +#define SC(name) \ + StatsCounter* count_of_FIXED_ARRAY_##name() \ + { return &count_of_FIXED_ARRAY_##name##_; } \ + StatsCounter* size_of_FIXED_ARRAY_##name() \ + { return &size_of_FIXED_ARRAY_##name##_; } + FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC) +#undef SC + +#define SC(name) \ + StatsCounter* count_of_CODE_AGE_##name() \ + { return &count_of_CODE_AGE_##name##_; } \ + StatsCounter* size_of_CODE_AGE_##name() \ + { return &size_of_CODE_AGE_##name##_; } + CODE_AGE_LIST_COMPLETE(SC) +#undef SC + + enum Id { +#define RATE_ID(name, caption) k_##name, + HISTOGRAM_TIMER_LIST(RATE_ID) +#undef RATE_ID +#define PERCENTAGE_ID(name, caption) k_##name, + HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID) +#undef PERCENTAGE_ID +#define MEMORY_ID(name, caption) k_##name, + HISTOGRAM_MEMORY_LIST(MEMORY_ID) +#undef MEMORY_ID +#define COUNTER_ID(name, caption) k_##name, + STATS_COUNTER_LIST_1(COUNTER_ID) + STATS_COUNTER_LIST_2(COUNTER_ID) +#undef COUNTER_ID +#define COUNTER_ID(name) kCountOf##name, kSizeOf##name, + INSTANCE_TYPE_LIST(COUNTER_ID) +#undef COUNTER_ID +#define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \ + kSizeOfCODE_TYPE_##name, + CODE_KIND_LIST(COUNTER_ID) +#undef COUNTER_ID +#define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \ + kSizeOfFIXED_ARRAY__##name, + FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID) +#undef COUNTER_ID +#define COUNTER_ID(name) kCountOfCODE_AGE__##name, \ + kSizeOfCODE_AGE__##name, + CODE_AGE_LIST_COMPLETE(COUNTER_ID) +#undef COUNTER_ID + stats_counter_count + }; + + void ResetHistograms(); + + private: +#define HT(name, caption) \ + HistogramTimer name##_; + HISTOGRAM_TIMER_LIST(HT) +#undef HT + +#define HP(name, caption) \ + Histogram name##_; + HISTOGRAM_PERCENTAGE_LIST(HP) +#undef HP + +#define HM(name, caption) \ + Histogram name##_; + HISTOGRAM_MEMORY_LIST(HM) +#undef HM + +#define SC(name, caption) \ + StatsCounter name##_; + STATS_COUNTER_LIST_1(SC) + STATS_COUNTER_LIST_2(SC) +#undef SC + +#define SC(name) \ + StatsCounter size_of_##name##_; \ + StatsCounter count_of_##name##_; + INSTANCE_TYPE_LIST(SC) +#undef SC + +#define SC(name) \ + StatsCounter size_of_CODE_TYPE_##name##_; \ + StatsCounter count_of_CODE_TYPE_##name##_; + CODE_KIND_LIST(SC) +#undef SC + +#define SC(name) \ + StatsCounter size_of_FIXED_ARRAY_##name##_; \ + StatsCounter count_of_FIXED_ARRAY_##name##_; + FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC) +#undef SC + +#define SC(name) \ + StatsCounter size_of_CODE_AGE_##name##_; \ + StatsCounter count_of_CODE_AGE_##name##_; + CODE_AGE_LIST_COMPLETE(SC) +#undef SC + + friend class Isolate; + + explicit Counters(Isolate* isolate); + + 
DISALLOW_IMPLICIT_CONSTRUCTORS(Counters); +}; + } } // namespace v8::internal #endif // V8_COUNTERS_H_ diff --git a/deps/v8/src/cpu-profiler-inl.h b/deps/v8/src/cpu-profiler-inl.h index 7bfbf5c57..c4efef1b3 100644 --- a/deps/v8/src/cpu-profiler-inl.h +++ b/deps/v8/src/cpu-profiler-inl.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_CPU_PROFILER_INL_H_ #define V8_CPU_PROFILER_INL_H_ diff --git a/deps/v8/src/cpu-profiler.cc b/deps/v8/src/cpu-profiler.cc index 41f3e9864..abe29340d 100644 --- a/deps/v8/src/cpu-profiler.cc +++ b/deps/v8/src/cpu-profiler.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -432,7 +409,6 @@ void CpuProfiler::StartProfiling(const char* title, bool record_samples) { if (profiles_->StartProfiling(title, record_samples)) { StartProcessorIfNotStarted(); } - processor_->AddCurrentStack(isolate_); } @@ -442,29 +418,32 @@ void CpuProfiler::StartProfiling(String* title, bool record_samples) { void CpuProfiler::StartProcessorIfNotStarted() { - if (processor_ == NULL) { - Logger* logger = isolate_->logger(); - // Disable logging when using the new implementation. - saved_is_logging_ = logger->is_logging_; - logger->is_logging_ = false; - generator_ = new ProfileGenerator(profiles_); - Sampler* sampler = logger->sampler(); - processor_ = new ProfilerEventsProcessor( - generator_, sampler, sampling_interval_); - is_profiling_ = true; - // Enumerate stuff we already have in the heap. - ASSERT(isolate_->heap()->HasBeenSetUp()); - if (!FLAG_prof_browser_mode) { - logger->LogCodeObjects(); - } - logger->LogCompiledFunctions(); - logger->LogAccessorCallbacks(); - LogBuiltins(); - // Enable stack sampling. - sampler->SetHasProcessingThread(true); - sampler->IncreaseProfilingDepth(); - processor_->StartSynchronously(); + if (processor_ != NULL) { + processor_->AddCurrentStack(isolate_); + return; } + Logger* logger = isolate_->logger(); + // Disable logging when using the new implementation. + saved_is_logging_ = logger->is_logging_; + logger->is_logging_ = false; + generator_ = new ProfileGenerator(profiles_); + Sampler* sampler = logger->sampler(); + processor_ = new ProfilerEventsProcessor( + generator_, sampler, sampling_interval_); + is_profiling_ = true; + // Enumerate stuff we already have in the heap. + ASSERT(isolate_->heap()->HasBeenSetUp()); + if (!FLAG_prof_browser_mode) { + logger->LogCodeObjects(); + } + logger->LogCompiledFunctions(); + logger->LogAccessorCallbacks(); + LogBuiltins(); + // Enable stack sampling. + sampler->SetHasProcessingThread(true); + sampler->IncreaseProfilingDepth(); + processor_->AddCurrentStack(isolate_); + processor_->StartSynchronously(); } diff --git a/deps/v8/src/cpu-profiler.h b/deps/v8/src/cpu-profiler.h index a9f034f0c..e87fe9e77 100644 --- a/deps/v8/src/cpu-profiler.h +++ b/deps/v8/src/cpu-profiler.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_CPU_PROFILER_H_ #define V8_CPU_PROFILER_H_ diff --git a/deps/v8/src/cpu.cc b/deps/v8/src/cpu.cc index 1e622495f..c600eda10 100644 --- a/deps/v8/src/cpu.cc +++ b/deps/v8/src/cpu.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "cpu.h" diff --git a/deps/v8/src/cpu.h b/deps/v8/src/cpu.h index b2e9f7da7..0315435b2 100644 --- a/deps/v8/src/cpu.h +++ b/deps/v8/src/cpu.h @@ -1,29 +1,6 @@ // Copyright 2006-2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This module contains the architecture-specific code. This make the rest of // the code less dependent on differences between different processor @@ -102,11 +79,6 @@ class CPU V8_FINAL BASE_EMBEDDED { // Returns the number of processors online. static int NumberOfProcessorsOnline(); - // Initializes the cpu architecture support. Called once at VM startup. - static void SetUp(); - - static bool SupportsCrankshaft(); - // Flush instruction cache. static void FlushICache(void* start, size_t size); diff --git a/deps/v8/src/d8-debug.cc b/deps/v8/src/d8-debug.cc index 7eb2016bd..7c1beaf2e 100644 --- a/deps/v8/src/d8-debug.cc +++ b/deps/v8/src/d8-debug.cc @@ -1,31 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifdef ENABLE_DEBUGGER_SUPPORT +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "d8.h" #include "d8-debug.h" @@ -370,5 +345,3 @@ void KeyboardThread::Run() { } // namespace v8 - -#endif // ENABLE_DEBUGGER_SUPPORT diff --git a/deps/v8/src/d8-debug.h b/deps/v8/src/d8-debug.h index 2d4f5e150..c211be759 100644 --- a/deps/v8/src/d8-debug.h +++ b/deps/v8/src/d8-debug.h @@ -1,29 +1,6 @@ // Copyright 2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_D8_DEBUG_H_ #define V8_D8_DEBUG_H_ diff --git a/deps/v8/src/d8-posix.cc b/deps/v8/src/d8-posix.cc index 36ade48b8..869cd5313 100644 --- a/deps/v8/src/d8-posix.cc +++ b/deps/v8/src/d8-posix.cc @@ -1,29 +1,6 @@ // Copyright 2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdlib.h> diff --git a/deps/v8/src/d8-readline.cc b/deps/v8/src/d8-readline.cc index 57b63bf4e..cb59f6ef1 100644 --- a/deps/v8/src/d8-readline.cc +++ b/deps/v8/src/d8-readline.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdio.h> // NOLINT #include <string.h> // NOLINT diff --git a/deps/v8/src/d8-windows.cc b/deps/v8/src/d8-windows.cc index edf5085d4..8e0dc96b6 100644 --- a/deps/v8/src/d8-windows.cc +++ b/deps/v8/src/d8-windows.cc @@ -1,29 +1,6 @@ // Copyright 2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "d8.h" diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc index 7ac0c6546..396d68b7f 100644 --- a/deps/v8/src/d8.cc +++ b/deps/v8/src/d8.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Defined when linking against shared lib on Windows. 
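Editor's note on the d8-debug.cc hunks above: besides the license-header swap, this upgrade deletes the ENABLE_DEBUGGER_SUPPORT compile-time guard (both the opening #ifdef and the trailing #endif), so the d8 debugger sources are now always built rather than conditionally compiled. The sketch below only illustrates the guard pattern being removed; it is not V8 code, and the function name is hypothetical.

    // Sketch only: how a translation unit guarded by ENABLE_DEBUGGER_SUPPORT
    // behaved before this upgrade. With the guard removed, as in the hunks
    // above, the body is compiled unconditionally.
    #include <cstdio>

    #ifdef ENABLE_DEBUGGER_SUPPORT
    static void StartDebuggerFrontEnd() {
      // Only reachable in builds configured with debugger support.
      std::printf("debugger front-end started\n");
    }
    #endif  // ENABLE_DEBUGGER_SUPPORT

    int main() {
    #ifdef ENABLE_DEBUGGER_SUPPORT
      StartDebuggerFrontEnd();
    #else
      std::printf("built without debugger support\n");
    #endif
      return 0;
    }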
@@ -67,7 +44,7 @@ #include "natives.h" #include "platform.h" #include "v8.h" -#endif // V8_SHARED +#endif // !V8_SHARED #if !defined(_WIN32) && !defined(_WIN64) #include <unistd.h> // NOLINT @@ -163,7 +140,7 @@ CounterCollection* Shell::counters_ = &local_counters_; i::Mutex Shell::context_mutex_; const i::TimeTicks Shell::kInitialTicks = i::TimeTicks::HighResolutionNow(); Persistent<Context> Shell::utility_context_; -#endif // V8_SHARED +#endif // !V8_SHARED Persistent<Context> Shell::evaluation_context_; ShellOptions Shell::options; @@ -178,7 +155,7 @@ bool CounterMap::Match(void* key1, void* key2) { const char* name2 = reinterpret_cast<const char*>(key2); return strcmp(name1, name2) == 0; } -#endif // V8_SHARED +#endif // !V8_SHARED // Converts a V8 value to a C string. @@ -193,11 +170,11 @@ bool Shell::ExecuteString(Isolate* isolate, Handle<Value> name, bool print_result, bool report_exceptions) { -#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT) +#ifndef V8_SHARED bool FLAG_debugger = i::FLAG_debugger; #else bool FLAG_debugger = false; -#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT +#endif // !V8_SHARED HandleScope handle_scope(isolate); TryCatch try_catch; options.script_executed = true; @@ -317,7 +294,7 @@ void Shell::PerformanceNow(const v8::FunctionCallbackInfo<v8::Value>& args) { i::TimeDelta delta = i::TimeTicks::HighResolutionNow() - kInitialTicks; args.GetReturnValue().Set(delta.InMillisecondsF()); } -#endif // V8_SHARED +#endif // !V8_SHARED // Realm.current() returns the index of the currently active realm. @@ -561,14 +538,14 @@ void Shell::Version(const v8::FunctionCallbackInfo<v8::Value>& args) { void Shell::ReportException(Isolate* isolate, v8::TryCatch* try_catch) { HandleScope handle_scope(isolate); -#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT) +#ifndef V8_SHARED Handle<Context> utility_context; bool enter_context = !isolate->InContext(); if (enter_context) { utility_context = Local<Context>::New(isolate, utility_context_); utility_context->Enter(); } -#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT +#endif // !V8_SHARED v8::String::Utf8Value exception(try_catch->Exception()); const char* exception_string = ToCString(exception); Handle<Message> message = try_catch->Message(); @@ -603,9 +580,9 @@ void Shell::ReportException(Isolate* isolate, v8::TryCatch* try_catch) { } } printf("\n"); -#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT) +#ifndef V8_SHARED if (enter_context) utility_context->Exit(); -#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT +#endif // !V8_SHARED } @@ -629,7 +606,6 @@ Handle<Array> Shell::GetCompletions(Isolate* isolate, } -#ifdef ENABLE_DEBUGGER_SUPPORT Local<Object> Shell::DebugMessageDetails(Isolate* isolate, Handle<String> message) { EscapableHandleScope handle_scope(isolate); @@ -670,11 +646,8 @@ void Shell::DispatchDebugMessages() { v8::Context::Scope context_scope(context); v8::Debug::ProcessDebugMessages(); } -#endif // ENABLE_DEBUGGER_SUPPORT -#endif // V8_SHARED -#ifndef V8_SHARED int32_t* Counter::Bind(const char* name, bool is_histogram) { int i; for (i = 0; i < kMaxNameSize - 1 && name[i]; i++) @@ -786,7 +759,6 @@ void Shell::InstallUtilityScript(Isolate* isolate) { evaluation_context->SetSecurityToken(Undefined(isolate)); v8::Context::Scope context_scope(utility_context); -#ifdef ENABLE_DEBUGGER_SUPPORT if (i::FLAG_debugger) printf("JavaScript debugger enabled\n"); // Install the debugger object in the utility scope i::Debug* debug = reinterpret_cast<i::Isolate*>(isolate)->debug(); @@ -797,7 +769,6 
@@ void Shell::InstallUtilityScript(Isolate* isolate) { Utils::ToLocal(js_debug)); debug->debug_context()->set_security_token( reinterpret_cast<i::Isolate*>(isolate)->heap()->undefined_value()); -#endif // ENABLE_DEBUGGER_SUPPORT // Run the d8 shell utility script in the utility context int source_index = i::NativesCollection<i::D8>::GetIndex("d8"); @@ -824,14 +795,12 @@ void Shell::InstallUtilityScript(Isolate* isolate) { i::SharedFunctionInfo::cast(*compiled_script)->script())); script_object->set_type(i::Smi::FromInt(i::Script::TYPE_NATIVE)); -#ifdef ENABLE_DEBUGGER_SUPPORT // Start the in-process debugger if requested. if (i::FLAG_debugger && !i::FLAG_debugger_agent) { v8::Debug::SetDebugEventListener2(HandleDebugEvent); } -#endif // ENABLE_DEBUGGER_SUPPORT } -#endif // V8_SHARED +#endif // !V8_SHARED #ifdef COMPRESS_STARTUP_DATA_BZ2 @@ -907,13 +876,13 @@ Handle<ObjectTemplate> Shell::CreateGlobalTemplate(Isolate* isolate) { FunctionTemplate::New(isolate, PerformanceNow)); global_template->Set(String::NewFromUtf8(isolate, "performance"), performance_template); -#endif // V8_SHARED +#endif // !V8_SHARED #if !defined(V8_SHARED) && !defined(_WIN32) && !defined(_WIN64) Handle<ObjectTemplate> os_templ = ObjectTemplate::New(isolate); AddOSMethods(isolate, os_templ); global_template->Set(String::NewFromUtf8(isolate, "os"), os_templ); -#endif // V8_SHARED +#endif // !V8_SHARED && !_WIN32 && !_WIN64 return global_template; } @@ -939,7 +908,7 @@ void Shell::Initialize(Isolate* isolate) { V8::SetCreateHistogramFunction(CreateHistogram); V8::SetAddHistogramSampleFunction(AddHistogramSample); } -#endif // V8_SHARED +#endif // !V8_SHARED } @@ -952,14 +921,12 @@ void Shell::InitializeDebugger(Isolate* isolate) { utility_context_.Reset(isolate, Context::New(isolate, NULL, global_template)); -#ifdef ENABLE_DEBUGGER_SUPPORT // Start the debugger agent if requested. 
if (i::FLAG_debugger_agent) { v8::Debug::EnableAgent("d8 shell", i::FLAG_debugger_port, true); v8::Debug::SetDebugMessageDispatchHandler(DispatchDebugMessages, true); } -#endif // ENABLE_DEBUGGER_SUPPORT -#endif // V8_SHARED +#endif // !V8_SHARED } @@ -967,7 +934,7 @@ Local<Context> Shell::CreateEvaluationContext(Isolate* isolate) { #ifndef V8_SHARED // This needs to be a critical section since this is not thread-safe i::LockGuard<i::Mutex> lock_guard(&context_mutex_); -#endif // V8_SHARED +#endif // !V8_SHARED // Initialize the global objects Handle<ObjectTemplate> global_template = CreateGlobalTemplate(isolate); EscapableHandleScope handle_scope(isolate); @@ -982,14 +949,14 @@ Local<Context> Shell::CreateEvaluationContext(Isolate* isolate) { factory->NewFixedArray(js_args.argc); for (int j = 0; j < js_args.argc; j++) { i::Handle<i::String> arg = - factory->NewStringFromUtf8(i::CStrVector(js_args[j])); + factory->NewStringFromUtf8(i::CStrVector(js_args[j])).ToHandleChecked(); arguments_array->set(j, *arg); } i::Handle<i::JSArray> arguments_jsarray = factory->NewJSArrayWithElements(arguments_array); context->Global()->Set(String::NewFromUtf8(isolate, "arguments"), Utils::ToLocal(arguments_jsarray)); -#endif // V8_SHARED +#endif // !V8_SHARED return handle_scope.Escape(context); } @@ -1013,7 +980,7 @@ struct CounterAndKey { inline bool operator<(const CounterAndKey& lhs, const CounterAndKey& rhs) { return strcmp(lhs.key, rhs.key) < 0; } -#endif // V8_SHARED +#endif // !V8_SHARED void Shell::OnExit() { @@ -1054,7 +1021,7 @@ void Shell::OnExit() { } delete counters_file_; delete counter_map_; -#endif // V8_SHARED +#endif // !V8_SHARED } @@ -1169,7 +1136,7 @@ static char* ReadLine(char* data) { static char* ReadWord(char* data) { return ReadToken(data, ' '); } -#endif // V8_SHARED +#endif // !V8_SHARED // Reads a file into a v8 string. @@ -1267,14 +1234,14 @@ void ShellThread::Run() { ptr = next_line; } } -#endif // V8_SHARED +#endif // !V8_SHARED SourceGroup::~SourceGroup() { #ifndef V8_SHARED delete thread_; thread_ = NULL; -#endif // V8_SHARED +#endif // !V8_SHARED } @@ -1382,7 +1349,7 @@ void SourceGroup::WaitForThread() { done_semaphore_.Wait(); } } -#endif // V8_SHARED +#endif // !V8_SHARED bool Shell::SetOptions(int argc, char* argv[]) { @@ -1436,7 +1403,7 @@ bool Shell::SetOptions(int argc, char* argv[]) { #else options.dump_heap_constants = true; argv[i] = NULL; -#endif +#endif // V8_SHARED } else if (strcmp(argv[i], "--throws") == 0) { options.expected_to_throw = true; argv[i] = NULL; @@ -1477,7 +1444,7 @@ bool Shell::SetOptions(int argc, char* argv[]) { printf("-p requires a file containing a list of files as parameter\n"); return false; } -#endif // V8_SHARED +#endif // !V8_SHARED v8::V8::SetFlagsFromCommandLine(&argc, argv, true); @@ -1523,7 +1490,7 @@ int Shell::RunMain(Isolate* isolate, int argc, char* argv[]) { for (int i = 1; i < options.num_isolates; ++i) { options.isolate_sources[i].StartExecuteInThread(); } -#endif // V8_SHARED +#endif // !V8_SHARED { // NOLINT Locker lock(isolate); { @@ -1532,13 +1499,13 @@ int Shell::RunMain(Isolate* isolate, int argc, char* argv[]) { if (options.last_run) { // Keep using the same context in the interactive shell. evaluation_context_.Reset(isolate, context); -#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT) +#ifndef V8_SHARED // If the interactive debugger is enabled make sure to activate // it before running the files passed on the command line. 
if (i::FLAG_debugger) { InstallUtilityScript(isolate); } -#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT +#endif // !V8_SHARED } { Context::Scope cscope(context); @@ -1565,7 +1532,7 @@ int Shell::RunMain(Isolate* isolate, int argc, char* argv[]) { thread->Join(); delete thread; } -#endif // V8_SHARED +#endif // !V8_SHARED return 0; } @@ -1639,7 +1606,7 @@ static void DumpHeapConstants(i::Isolate* isolate) { printf("}\n"); #undef ROOT_LIST_CASE } -#endif // V8_SHARED +#endif // !V8_SHARED class ShellArrayBufferAllocator : public v8::ArrayBuffer::Allocator { @@ -1696,6 +1663,7 @@ int Shell::Main(int argc, char* argv[]) { #ifndef V8_SHARED v8::ResourceConstraints constraints; constraints.ConfigureDefaults(i::OS::TotalPhysicalMemory(), + i::OS::MaxVirtualMemory(), i::CPU::NumberOfProcessorsOnline()); v8::SetResourceConstraints(isolate, &constraints); #endif @@ -1742,25 +1710,25 @@ int Shell::Main(int argc, char* argv[]) { } -#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT) +#ifndef V8_SHARED // Run remote debugger if requested, but never on --test if (i::FLAG_remote_debugger && !options.test_shell) { InstallUtilityScript(isolate); RunRemoteDebugger(isolate, i::FLAG_debugger_port); return 0; } -#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT +#endif // !V8_SHARED // Run interactive shell if explicitly requested or if no script has been // executed, but never on --test if (( options.interactive_shell || !options.script_executed ) && !options.test_shell ) { -#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT) +#ifndef V8_SHARED if (!i::FLAG_debugger) { InstallUtilityScript(isolate); } -#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT +#endif // !V8_SHARED RunShell(isolate); } } diff --git a/deps/v8/src/d8.h b/deps/v8/src/d8.h index 3edd8a730..bf290238b 100644 --- a/deps/v8/src/d8.h +++ b/deps/v8/src/d8.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
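Editor's note on the d8.cc hunks above: factory->NewStringFromUtf8(...) now returns a maybe type and is immediately unwrapped with ToHandleChecked(), part of this upgrade's broader move from implicit empty-handle checks to explicit MaybeHandle results. The sketch below shows the shape of that pattern with simplified stand-in types; MaybeValue, ToChecked, To, and the single-argument NewStringFromUtf8 here are illustrative, not V8's real Handle/MaybeHandle API.

    #include <cassert>
    #include <cstdio>
    #include <string>

    // Minimal stand-in for a maybe type: either holds a value or signals failure.
    template <typename T>
    class MaybeValue {
     public:
      MaybeValue() : has_value_(false) {}
      explicit MaybeValue(T value) : has_value_(true), value_(value) {}

      // Caller asserts the operation cannot have failed.
      T ToChecked() const {
        assert(has_value_ && "unexpected failure");
        return value_;
      }

      // Caller handles failure explicitly.
      bool To(T* out) const {
        if (!has_value_) return false;
        *out = value_;
        return true;
      }

     private:
      bool has_value_;
      T value_;
    };

    // A fallible "allocation" that returns the wrapper instead of a raw value.
    static MaybeValue<std::string> NewStringFromUtf8(const char* data) {
      if (data == nullptr) return MaybeValue<std::string>();
      return MaybeValue<std::string>(std::string(data));
    }

    int main() {
      // Call sites that cannot fail unwrap immediately, as the d8 hunk now does.
      std::string arg = NewStringFromUtf8("--harmony").ToChecked();
      std::printf("argument: %s\n", arg.c_str());

      // Call sites that can fail test the result instead of a sentinel handle.
      std::string maybe;
      if (!NewStringFromUtf8(nullptr).To(&maybe)) {
        std::printf("allocation failed, handled explicitly\n");
      }
      return 0;
    }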
#ifndef V8_D8_H_ #define V8_D8_H_ @@ -35,7 +12,7 @@ #include "v8.h" #else #include "../include/v8.h" -#endif // V8_SHARED +#endif // !V8_SHARED namespace v8 { @@ -113,7 +90,7 @@ class CounterMap { static bool Match(void* key1, void* key2); i::HashMap hash_map_; }; -#endif // V8_SHARED +#endif // !V8_SHARED class LineEditor { @@ -143,7 +120,7 @@ class SourceGroup { next_semaphore_(0), done_semaphore_(0), thread_(NULL), -#endif // V8_SHARED +#endif // !V8_SHARED argv_(NULL), begin_offset_(0), end_offset_(0) {} @@ -183,7 +160,7 @@ class SourceGroup { i::Semaphore next_semaphore_; i::Semaphore done_semaphore_; i::Thread* thread_; -#endif // V8_SHARED +#endif // !V8_SHARED void ExitShell(int exit_code); Handle<String> ReadFile(Isolate* isolate, const char* name); @@ -221,7 +198,7 @@ class ShellOptions { #ifndef V8_SHARED num_parallel_files(0), parallel_files(NULL), -#endif // V8_SHARED +#endif // !V8_SHARED script_executed(false), last_run(true), send_idle_notification(false), @@ -239,14 +216,14 @@ class ShellOptions { ~ShellOptions() { #ifndef V8_SHARED delete[] parallel_files; -#endif // V8_SHARED +#endif // !V8_SHARED delete[] isolate_sources; } #ifndef V8_SHARED int num_parallel_files; char** parallel_files; -#endif // V8_SHARED +#endif // !V8_SHARED bool script_executed; bool last_run; bool send_idle_notification; @@ -295,16 +272,14 @@ class Shell : public i::AllStatic { static void AddHistogramSample(void* histogram, int sample); static void MapCounters(const char* name); -#ifdef ENABLE_DEBUGGER_SUPPORT static Local<Object> DebugMessageDetails(Isolate* isolate, Handle<String> message); static Local<Value> DebugCommandToJSONRequest(Isolate* isolate, Handle<String> command); static void DispatchDebugMessages(); -#endif // ENABLE_DEBUGGER_SUPPORT static void PerformanceNow(const v8::FunctionCallbackInfo<v8::Value>& args); -#endif // V8_SHARED +#endif // !V8_SHARED static void RealmCurrent(const v8::FunctionCallbackInfo<v8::Value>& args); static void RealmOwner(const v8::FunctionCallbackInfo<v8::Value>& args); @@ -400,7 +375,7 @@ class Shell : public i::AllStatic { static Counter* GetCounter(const char* name, bool is_histogram); static void InstallUtilityScript(Isolate* isolate); -#endif // V8_SHARED +#endif // !V8_SHARED static void Initialize(Isolate* isolate); static void InitializeDebugger(Isolate* isolate); static void RunShell(Isolate* isolate); diff --git a/deps/v8/src/d8.js b/deps/v8/src/d8.js index 35b61d54e..0c1f32dc8 100644 --- a/deps/v8/src/d8.js +++ b/deps/v8/src/d8.js @@ -1,29 +1,6 @@ // Copyright 2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. "use strict"; diff --git a/deps/v8/src/data-flow.cc b/deps/v8/src/data-flow.cc index 6a3b05cc8..a2df93368 100644 --- a/deps/v8/src/data-flow.cc +++ b/deps/v8/src/data-flow.cc @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/data-flow.h b/deps/v8/src/data-flow.h index 8ceccf67c..98435e603 100644 --- a/deps/v8/src/data-flow.h +++ b/deps/v8/src/data-flow.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DATAFLOW_H_ #define V8_DATAFLOW_H_ diff --git a/deps/v8/src/date.cc b/deps/v8/src/date.cc index 70d6be989..e0fb5ee12 100644 --- a/deps/v8/src/date.cc +++ b/deps/v8/src/date.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
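Editor's note: throughout the d8.cc and d8.h hunks above, trailing #endif comments are corrected from "// V8_SHARED" to "// !V8_SHARED" so that the comment names the condition that actually holds inside the #ifndef block. A minimal illustration of that convention follows; the macro is taken from the hunks, the function is just an example.

    #include <cstdio>

    #ifndef V8_SHARED
    // Present only in the static (non-shared-library) build.
    static void PrintBuildNote() { std::printf("counters available\n"); }
    #endif  // !V8_SHARED

    int main() {
    #ifndef V8_SHARED
      PrintBuildNote();
    #endif  // !V8_SHARED
      return 0;
    }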
#include "date.h" @@ -49,9 +26,10 @@ static const char kDaysInMonths[] = void DateCache::ResetDateCache() { static const int kMaxStamp = Smi::kMaxValue; - stamp_ = Smi::FromInt(stamp_->value() + 1); - if (stamp_->value() > kMaxStamp) { + if (stamp_->value() >= kMaxStamp) { stamp_ = Smi::FromInt(0); + } else { + stamp_ = Smi::FromInt(stamp_->value() + 1); } ASSERT(stamp_ != Smi::FromInt(kInvalidStamp)); for (int i = 0; i < kDSTSize; ++i) { diff --git a/deps/v8/src/date.h b/deps/v8/src/date.h index e9c9d9cb0..a2af685d2 100644 --- a/deps/v8/src/date.h +++ b/deps/v8/src/date.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DATE_H_ #define V8_DATE_H_ diff --git a/deps/v8/src/date.js b/deps/v8/src/date.js index b7ecbeb39..2a445979e 100644 --- a/deps/v8/src/date.js +++ b/deps/v8/src/date.js @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file relies on the fact that the following declarations have been made // in v8natives.js: diff --git a/deps/v8/src/dateparser-inl.h b/deps/v8/src/dateparser-inl.h index 3cb36fa43..4e866e18d 100644 --- a/deps/v8/src/dateparser-inl.h +++ b/deps/v8/src/dateparser-inl.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DATEPARSER_INL_H_ #define V8_DATEPARSER_INL_H_ diff --git a/deps/v8/src/dateparser.cc b/deps/v8/src/dateparser.cc index 3964e8117..0c2c18b34 100644 --- a/deps/v8/src/dateparser.cc +++ b/deps/v8/src/dateparser.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/dateparser.h b/deps/v8/src/dateparser.h index 7dc489de3..1b4efb6ae 100644 --- a/deps/v8/src/dateparser.h +++ b/deps/v8/src/dateparser.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DATEPARSER_H_ #define V8_DATEPARSER_H_ diff --git a/deps/v8/src/debug-agent.cc b/deps/v8/src/debug-agent.cc index 49790cee9..94f334bfe 100644 --- a/deps/v8/src/debug-agent.cc +++ b/deps/v8/src/debug-agent.cc @@ -1,31 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifdef ENABLE_DEBUGGER_SUPPORT +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" #include "debug.h" @@ -229,10 +204,10 @@ void DebuggerAgentSession::Run() { decoder.WriteUtf16(temp.start(), utf16_length); // Send the request received to the debugger. - v8::Debug::SendCommand(temp.start(), + v8::Debug::SendCommand(reinterpret_cast<v8::Isolate*>(agent_->isolate()), + temp.start(), utf16_length, - NULL, - reinterpret_cast<v8::Isolate*>(agent_->isolate())); + NULL); if (is_closing_session) { // Session is closed. @@ -504,5 +479,3 @@ int DebuggerAgentUtil::ReceiveAll(Socket* conn, char* data, int len) { } } } // namespace v8::internal - -#endif // ENABLE_DEBUGGER_SUPPORT diff --git a/deps/v8/src/debug-agent.h b/deps/v8/src/debug-agent.h index e81e4cd6a..3e3f25a5d 100644 --- a/deps/v8/src/debug-agent.h +++ b/deps/v8/src/debug-agent.h @@ -1,34 +1,10 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DEBUG_AGENT_H_ #define V8_DEBUG_AGENT_H_ -#ifdef ENABLE_DEBUGGER_SUPPORT #include "../include/v8-debug.h" #include "platform.h" @@ -114,6 +90,4 @@ class DebuggerAgentUtil { } } // namespace v8::internal -#endif // ENABLE_DEBUGGER_SUPPORT - #endif // V8_DEBUG_AGENT_H_ diff --git a/deps/v8/src/debug-debugger.js b/deps/v8/src/debug-debugger.js index b159ae3b2..0ce88328b 100644 --- a/deps/v8/src/debug-debugger.js +++ b/deps/v8/src/debug-debugger.js @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Default number of frames to include in the response to backtrace request. 
var kDefaultBacktraceLength = 10; @@ -1093,15 +1070,16 @@ BreakEvent.prototype.toJSONProtocol = function() { }; -function MakeExceptionEvent(exec_state, exception, uncaught) { - return new ExceptionEvent(exec_state, exception, uncaught); +function MakeExceptionEvent(exec_state, exception, uncaught, promise) { + return new ExceptionEvent(exec_state, exception, uncaught, promise); } -function ExceptionEvent(exec_state, exception, uncaught) { +function ExceptionEvent(exec_state, exception, uncaught, promise) { this.exec_state_ = exec_state; this.exception_ = exception; this.uncaught_ = uncaught; + this.promise_ = promise; } @@ -1125,6 +1103,11 @@ ExceptionEvent.prototype.uncaught = function() { }; +ExceptionEvent.prototype.promise = function() { + return this.promise_; +}; + + ExceptionEvent.prototype.func = function() { return this.exec_state_.frame(0).func(); }; @@ -1217,31 +1200,6 @@ CompileEvent.prototype.toJSONProtocol = function() { }; -function MakeNewFunctionEvent(func) { - return new NewFunctionEvent(func); -} - - -function NewFunctionEvent(func) { - this.func = func; -} - - -NewFunctionEvent.prototype.eventType = function() { - return Debug.DebugEvent.NewFunction; -}; - - -NewFunctionEvent.prototype.name = function() { - return this.func.name; -}; - - -NewFunctionEvent.prototype.setBreakPoint = function(p) { - Debug.setBreakPoint(this.func, p || 0); -}; - - function MakeScriptCollectedEvent(exec_state, id) { return new ScriptCollectedEvent(exec_state, id); } diff --git a/deps/v8/src/debug.cc b/deps/v8/src/debug.cc index d7667f19c..3ecf8bada 100644 --- a/deps/v8/src/debug.cc +++ b/deps/v8/src/debug.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
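Editor's note on the date.cc hunk further above: ResetDateCache used to bump the stamp first and only then test it against kMaxStamp (Smi::kMaxValue), so the old order could take the value past the Smi range before the check ran; the new code tests before incrementing and wraps to zero at the bound. A standalone sketch of the corrected wrap-around logic, using a plain int in place of a Smi; kMaxStamp, kInvalidStamp, and NextStamp below are illustrative, not V8's constants.

    #include <cassert>
    #include <climits>
    #include <cstdio>

    static const int kMaxStamp = INT_MAX / 2;  // illustrative bound
    static const int kInvalidStamp = -1;

    static int NextStamp(int stamp) {
      // Check the bound *before* incrementing so the counter never overflows;
      // the old code incremented first and only then tested the result.
      if (stamp >= kMaxStamp) return 0;
      return stamp + 1;
    }

    int main() {
      int stamp = NextStamp(0);            // normal case: 1
      int wrapped = NextStamp(kMaxStamp);  // at the bound: wraps to 0
      assert(stamp == 1 && wrapped == 0);
      assert(stamp != kInvalidStamp && wrapped != kInvalidStamp);
      std::printf("stamp=%d wrapped=%d\n", stamp, wrapped);
      return 0;
    }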
#include "v8.h" @@ -53,9 +30,6 @@ namespace v8 { namespace internal { -#ifdef ENABLE_DEBUGGER_SUPPORT - - Debug::Debug(Isolate* isolate) : has_break_points_(false), script_cache_(NULL), @@ -63,10 +37,11 @@ Debug::Debug(Isolate* isolate) disable_break_(false), break_on_exception_(false), break_on_uncaught_exception_(false), - debug_break_return_(NULL), - debug_break_slot_(NULL), + promise_catch_handlers_(0), + promise_getters_(0), isolate_(isolate) { memset(registers_, 0, sizeof(JSCallerSavedBuffer)); + ThreadInit(); } @@ -406,6 +381,7 @@ bool BreakLocationIterator::IsStepInLocation(Isolate* isolate) { if (target_code->kind() == Code::STUB) { return target_code->major_key() == CodeStub::CallFunction; } + return target_code->is_call_stub(); } return false; } @@ -688,21 +664,6 @@ void ScriptCache::HandleWeakScript( } -void Debug::SetUp(bool create_heap_objects) { - ThreadInit(); - if (create_heap_objects) { - // Get code to handle debug break on return. - debug_break_return_ = - isolate_->builtins()->builtin(Builtins::kReturn_DebugBreak); - ASSERT(debug_break_return_->IsCode()); - // Get code to handle debug break in debug break slots. - debug_break_slot_ = - isolate_->builtins()->builtin(Builtins::kSlot_DebugBreak); - ASSERT(debug_break_slot_->IsCode()); - } -} - - void Debug::HandleWeakDebugInfo( const v8::WeakCallbackData<v8::Value, void>& data) { Debug* debug = reinterpret_cast<Isolate*>(data.GetIsolate())->debug(); @@ -753,8 +714,8 @@ bool Debug::CompileDebuggerScript(Isolate* isolate, int index) { Handle<String> source_code = isolate->bootstrapper()->NativesSourceLookup(index); Vector<const char> name = Natives::GetScriptName(index); - Handle<String> script_name = factory->NewStringFromAscii(name); - ASSERT(!script_name.is_null()); + Handle<String> script_name = + factory->NewStringFromAscii(name).ToHandleChecked(); Handle<Context> context = isolate->native_context(); // Compile the script. @@ -774,19 +735,19 @@ bool Debug::CompileDebuggerScript(Isolate* isolate, int index) { } // Execute the shared function in the debugger context. - bool caught_exception; Handle<JSFunction> function = factory->NewFunctionFromSharedFunctionInfo(function_info, context); - Handle<Object> exception = + Handle<Object> exception; + MaybeHandle<Object> result = Execution::TryCall(function, Handle<Object>(context->global_object(), isolate), 0, NULL, - &caught_exception); + &exception); // Check for caught exceptions. 
- if (caught_exception) { + if (result.is_null()) { ASSERT(!isolate->has_pending_exception()); MessageLocation computed_location; isolate->ComputeLocation(&computed_location); @@ -847,7 +808,7 @@ bool Debug::Load() { Handle<String> key = isolate_->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("builtins")); Handle<GlobalObject> global = Handle<GlobalObject>(context->global_object()); - RETURN_IF_EMPTY_HANDLE_VALUE( + RETURN_ON_EXCEPTION_VALUE( isolate_, JSReceiver::SetProperty(global, key, @@ -906,12 +867,6 @@ void Debug::PreemptionWhileInDebugger() { } -void Debug::Iterate(ObjectVisitor* v) { - v->VisitPointer(BitCast<Object**>(&(debug_break_return_))); - v->VisitPointer(BitCast<Object**>(&(debug_break_slot_))); -} - - Object* Debug::Break(Arguments args) { Heap* heap = isolate_->heap(); HandleScope scope(isolate_); @@ -1061,7 +1016,7 @@ Object* Debug::Break(Arguments args) { } -RUNTIME_FUNCTION(Object*, Debug_Break) { +RUNTIME_FUNCTION(Debug_Break) { return isolate->debug()->Break(args); } @@ -1116,31 +1071,26 @@ bool Debug::CheckBreakPoint(Handle<Object> break_point_object) { Handle<String> is_break_point_triggered_string = factory->InternalizeOneByteString( STATIC_ASCII_VECTOR("IsBreakPointTriggered")); + Handle<GlobalObject> debug_global(debug_context()->global_object()); Handle<JSFunction> check_break_point = - Handle<JSFunction>(JSFunction::cast( - debug_context()->global_object()->GetPropertyNoExceptionThrown( - *is_break_point_triggered_string))); + Handle<JSFunction>::cast(Object::GetProperty( + debug_global, is_break_point_triggered_string).ToHandleChecked()); // Get the break id as an object. Handle<Object> break_id = factory->NewNumberFromInt(Debug::break_id()); // Call HandleBreakPointx. - bool caught_exception; Handle<Object> argv[] = { break_id, break_point_object }; - Handle<Object> result = Execution::TryCall(check_break_point, - isolate_->js_builtins_object(), - ARRAY_SIZE(argv), - argv, - &caught_exception); - - // If exception or non boolean result handle as not triggered - if (caught_exception || !result->IsBoolean()) { + Handle<Object> result; + if (!Execution::TryCall(check_break_point, + isolate_->js_builtins_object(), + ARRAY_SIZE(argv), + argv).ToHandle(&result)) { return false; } // Return whether the break point is triggered. - ASSERT(!result.is_null()); - return (*result)->IsTrue(); + return result->IsTrue(); } @@ -1348,6 +1298,53 @@ bool Debug::IsBreakOnException(ExceptionBreakType type) { } +void Debug::PromiseHandlePrologue(Handle<JSFunction> promise_getter) { + Handle<JSFunction> promise_getter_global = Handle<JSFunction>::cast( + isolate_->global_handles()->Create(*promise_getter)); + StackHandler* handler = + StackHandler::FromAddress(Isolate::handler(isolate_->thread_local_top())); + promise_getters_.Add(promise_getter_global); + promise_catch_handlers_.Add(handler); +} + + +void Debug::PromiseHandleEpilogue() { + if (promise_catch_handlers_.length() == 0) return; + promise_catch_handlers_.RemoveLast(); + Handle<Object> promise_getter = promise_getters_.RemoveLast(); + isolate_->global_handles()->Destroy(promise_getter.location()); +} + + +Handle<Object> Debug::GetPromiseForUncaughtException() { + Handle<Object> undefined = isolate_->factory()->undefined_value(); + if (promise_getters_.length() == 0) return undefined; + Handle<JSFunction> promise_getter = promise_getters_.last(); + StackHandler* promise_catch = promise_catch_handlers_.last(); + // Find the top-most try-catch handler. 
+ StackHandler* handler = StackHandler::FromAddress( + Isolate::handler(isolate_->thread_local_top())); + while (handler != NULL && !handler->is_catch()) { + handler = handler->next(); + } +#ifdef DEBUG + // Make sure that our promise catch handler is in the list of handlers, + // even if it's not the top-most try-catch handler. + StackHandler* temp = handler; + while (temp != promise_catch && !temp->is_catch()) { + temp = temp->next(); + CHECK(temp != NULL); + } +#endif // DEBUG + + if (handler == promise_catch) { + return Execution::Call( + isolate_, promise_getter, undefined, 0, NULL).ToHandleChecked(); + } + return undefined; +} + + void Debug::PrepareStep(StepAction step_action, int step_count, StackFrame::Id frame_id) { @@ -1425,6 +1422,9 @@ void Debug::PrepareStep(StepAction step_action, bool is_call_target = false; Address target = it.rinfo()->target_address(); Code* code = Code::GetCodeFromTargetAddress(target); + if (code->is_call_stub()) { + is_call_target = true; + } if (code->is_inline_cache_stub()) { is_inline_cache_stub = true; is_load_or_store = !is_call_target; @@ -1439,8 +1439,9 @@ void Debug::PrepareStep(StepAction step_action, maybe_call_function_stub = Code::GetCodeFromTargetAddress(original_target); } - if (maybe_call_function_stub->kind() == Code::STUB && - maybe_call_function_stub->major_key() == CodeStub::CallFunction) { + if ((maybe_call_function_stub->kind() == Code::STUB && + maybe_call_function_stub->major_key() == CodeStub::CallFunction) || + maybe_call_function_stub->kind() == Code::CALL_IC) { // Save reference to the code as we may need it to find out arguments // count for 'step in' later. call_function_stub = Handle<Code>(maybe_call_function_stub); @@ -1496,6 +1497,7 @@ void Debug::PrepareStep(StepAction step_action, } else if (!call_function_stub.is_null()) { // If it's CallFunction stub ensure target function is compiled and flood // it with one shot breakpoints. + bool is_call_ic = call_function_stub->kind() == Code::CALL_IC; // Find out number of arguments from the stub minor key. // Reverse lookup required as the minor key cannot be retrieved @@ -1511,11 +1513,13 @@ void Debug::PrepareStep(StepAction step_action, uint32_t key = Smi::cast(*obj)->value(); // Argc in the stub is the number of arguments passed - not the // expected arguments of the called function. - int call_function_arg_count = - CallFunctionStub::ExtractArgcFromMinorKey( + int call_function_arg_count = is_call_ic + ? CallICStub::ExtractArgcFromMinorKey(CodeStub::MinorKeyFromKey(key)) + : CallFunctionStub::ExtractArgcFromMinorKey( CodeStub::MinorKeyFromKey(key)); - ASSERT(call_function_stub->major_key() == - CodeStub::MajorKeyFromKey(key)); + + ASSERT(is_call_ic || + call_function_stub->major_key() == CodeStub::MajorKeyFromKey(key)); // Find target function on the expression stack. // Expression stack looks like this (top to bottom): @@ -1643,6 +1647,9 @@ Handle<Code> Debug::FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode) { // used by the call site. 
if (code->is_inline_cache_stub()) { switch (code->kind()) { + case Code::CALL_IC: + return isolate->builtins()->CallICStub_DebugBreak(); + case Code::LOAD_IC: return isolate->builtins()->LoadIC_DebugBreak(); @@ -1671,11 +1678,7 @@ Handle<Code> Debug::FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode) { } if (code->kind() == Code::STUB) { ASSERT(code->major_key() == CodeStub::CallFunction); - if (code->has_function_cache()) { - return isolate->builtins()->CallFunctionStub_Recording_DebugBreak(); - } else { - return isolate->builtins()->CallFunctionStub_DebugBreak(); - } + return isolate->builtins()->CallFunctionStub_DebugBreak(); } UNREACHABLE(); @@ -1878,6 +1881,59 @@ static void CollectActiveFunctionsFromThread( } +// Figure out how many bytes of "pc_offset" correspond to actual code by +// subtracting off the bytes that correspond to constant/veneer pools. See +// Assembler::CheckConstPool() and Assembler::CheckVeneerPool(). Note that this +// is only useful for architectures using constant pools or veneer pools. +static int ComputeCodeOffsetFromPcOffset(Code *code, int pc_offset) { + ASSERT_EQ(code->kind(), Code::FUNCTION); + ASSERT(!code->has_debug_break_slots()); + ASSERT_LE(0, pc_offset); + ASSERT_LT(pc_offset, code->instruction_end() - code->instruction_start()); + + int mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) | + RelocInfo::ModeMask(RelocInfo::VENEER_POOL); + byte *pc = code->instruction_start() + pc_offset; + int code_offset = pc_offset; + for (RelocIterator it(code, mask); !it.done(); it.next()) { + RelocInfo* info = it.rinfo(); + if (info->pc() >= pc) break; + ASSERT(RelocInfo::IsConstPool(info->rmode())); + code_offset -= static_cast<int>(info->data()); + ASSERT_LE(0, code_offset); + } + + return code_offset; +} + + +// The inverse of ComputeCodeOffsetFromPcOffset. +static int ComputePcOffsetFromCodeOffset(Code *code, int code_offset) { + ASSERT_EQ(code->kind(), Code::FUNCTION); + + int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | + RelocInfo::ModeMask(RelocInfo::CONST_POOL) | + RelocInfo::ModeMask(RelocInfo::VENEER_POOL); + int reloc = 0; + for (RelocIterator it(code, mask); !it.done(); it.next()) { + RelocInfo* info = it.rinfo(); + if (info->pc() - code->instruction_start() - reloc >= code_offset) break; + if (RelocInfo::IsDebugBreakSlot(info->rmode())) { + reloc += Assembler::kDebugBreakSlotLength; + } else { + ASSERT(RelocInfo::IsConstPool(info->rmode())); + reloc += static_cast<int>(info->data()); + } + } + + int pc_offset = code_offset + reloc; + + ASSERT_LT(code->instruction_start() + pc_offset, code->instruction_end()); + + return pc_offset; +} + + static void RedirectActivationsToRecompiledCodeOnThread( Isolate* isolate, ThreadLocalTop* top) { @@ -1899,51 +1955,13 @@ static void RedirectActivationsToRecompiledCodeOnThread( continue; } - // Iterate over the RelocInfo in the original code to compute the sum of the - // constant pools and veneer pools sizes. (See Assembler::CheckConstPool() - // and Assembler::CheckVeneerPool()) - // Note that this is only useful for architectures using constant pools or - // veneer pools. 
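The helper pair introduced above, ComputeCodeOffsetFromPcOffset and ComputePcOffsetFromCodeOffset, converts between raw pc offsets and "logical" code offsets by subtracting (or re-adding) the bytes occupied by constant pools, veneer pools and debug break slots that sit between instructions. A simplified, self-contained version over an explicit sorted list of inserted regions; the list is an assumption made for the sketch in place of V8's RelocIterator:

#include <cassert>
#include <vector>

struct InsertedRegion {  // a pool or debug break slot emitted at `pc_offset`
  int pc_offset;         // where it starts in the instruction stream
  int size;              // how many bytes it occupies
};

// Drop the bytes of every region that starts before `pc_offset`
// (cf. ComputeCodeOffsetFromPcOffset above).
int CodeOffsetFromPcOffset(const std::vector<InsertedRegion>& regions,
                           int pc_offset) {
  int code_offset = pc_offset;
  for (const InsertedRegion& r : regions) {
    if (r.pc_offset >= pc_offset) break;
    code_offset -= r.size;
  }
  return code_offset;
}

// Inverse direction: re-add the bytes of every region the logical offset has
// already passed (cf. ComputePcOffsetFromCodeOffset above).
int PcOffsetFromCodeOffset(const std::vector<InsertedRegion>& regions,
                           int code_offset) {
  int reloc = 0;
  for (const InsertedRegion& r : regions) {
    if (r.pc_offset - reloc >= code_offset) break;
    reloc += r.size;
  }
  return code_offset + reloc;
}

int main() {
  // One 8-byte constant pool at pc offset 16.
  std::vector<InsertedRegion> regions = {{16, 8}};
  assert(CodeOffsetFromPcOffset(regions, 32) == 24);  // pool bytes removed
  assert(PcOffsetFromCodeOffset(regions, 24) == 32);  // and added back
  assert(CodeOffsetFromPcOffset(regions, 8) == 8);    // offsets before the pool
}

Factoring the mapping out this way is what lets the patch reuse it both for redirecting stack frames and, further down, for relocating suspended generator continuations.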
- int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) | - RelocInfo::ModeMask(RelocInfo::VENEER_POOL); - int frame_pool_size = 0; - for (RelocIterator it(*frame_code, pool_mask); !it.done(); it.next()) { - RelocInfo* info = it.rinfo(); - if (info->pc() >= frame->pc()) break; - frame_pool_size += static_cast<int>(info->data()); - } - intptr_t frame_offset = - frame->pc() - frame_code->instruction_start() - frame_pool_size; - - // Iterate over the RelocInfo for new code to find the number of bytes - // generated for debug slots and constant pools. - int debug_break_slot_bytes = 0; - int new_code_pool_size = 0; - int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | - RelocInfo::ModeMask(RelocInfo::CONST_POOL) | - RelocInfo::ModeMask(RelocInfo::VENEER_POOL); - for (RelocIterator it(*new_code, mask); !it.done(); it.next()) { - // Check if the pc in the new code with debug break - // slots is before this slot. - RelocInfo* info = it.rinfo(); - intptr_t new_offset = info->pc() - new_code->instruction_start() - - new_code_pool_size - debug_break_slot_bytes; - if (new_offset >= frame_offset) { - break; - } - - if (RelocInfo::IsDebugBreakSlot(info->rmode())) { - debug_break_slot_bytes += Assembler::kDebugBreakSlotLength; - } else { - ASSERT(RelocInfo::IsConstPool(info->rmode())); - // The size of the pools is encoded in the data. - new_code_pool_size += static_cast<int>(info->data()); - } - } + int old_pc_offset = + static_cast<int>(frame->pc() - frame_code->instruction_start()); + int code_offset = ComputeCodeOffsetFromPcOffset(*frame_code, old_pc_offset); + int new_pc_offset = ComputePcOffsetFromCodeOffset(*new_code, code_offset); // Compute the equivalent pc in the new code. - byte* new_pc = new_code->instruction_start() + frame_offset + - debug_break_slot_bytes + new_code_pool_size; + byte* new_pc = new_code->instruction_start() + new_pc_offset; if (FLAG_trace_deopt) { PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) " @@ -1999,6 +2017,55 @@ class ActiveFunctionsRedirector : public ThreadVisitor { }; +class ForceDebuggerActive { + public: + explicit ForceDebuggerActive(Isolate *isolate) { + isolate_ = isolate; + old_state_ = isolate->debugger()->force_debugger_active(); + isolate_->debugger()->set_force_debugger_active(true); + } + + ~ForceDebuggerActive() { + isolate_->debugger()->set_force_debugger_active(old_state_); + } + + private: + Isolate *isolate_; + bool old_state_; + + DISALLOW_COPY_AND_ASSIGN(ForceDebuggerActive); +}; + + +void Debug::MaybeRecompileFunctionForDebugging(Handle<JSFunction> function) { + ASSERT_EQ(Code::FUNCTION, function->code()->kind()); + ASSERT_EQ(function->code(), function->shared()->code()); + + if (function->code()->has_debug_break_slots()) return; + + ForceDebuggerActive force_debugger_active(isolate_); + MaybeHandle<Code> code = Compiler::GetCodeForDebugging(function); + // Recompilation can fail. In that case leave the code as it was. 
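ForceDebuggerActive above is a small RAII guard: it saves the current force_debugger_active flag, sets it for the duration of a scope, and restores the old value on every exit path, replacing the manual save/set/restore sequence deleted further down in PrepareForBreakPoints. A generic sketch of the same pattern; the Debugger type here is a stand-in, not V8's:

#include <cassert>

struct Debugger {                      // stand-in with just the flag we need
  bool force_debugger_active = false;
};

class ForceDebuggerActiveScope {
 public:
  explicit ForceDebuggerActiveScope(Debugger* debugger)
      : debugger_(debugger), old_state_(debugger->force_debugger_active) {
    debugger_->force_debugger_active = true;
  }
  ~ForceDebuggerActiveScope() {        // restored even on early returns
    debugger_->force_debugger_active = old_state_;
  }
  ForceDebuggerActiveScope(const ForceDebuggerActiveScope&) = delete;
  ForceDebuggerActiveScope& operator=(const ForceDebuggerActiveScope&) = delete;

 private:
  Debugger* debugger_;
  bool old_state_;
};

int main() {
  Debugger debugger;
  {
    ForceDebuggerActiveScope scope(&debugger);
    assert(debugger.force_debugger_active);
  }
  assert(!debugger.force_debugger_active);  // old value restored at scope exit
}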
+ if (!code.is_null()) + function->ReplaceCode(*code.ToHandleChecked()); + ASSERT_EQ(function->code(), function->shared()->code()); +} + + +void Debug::RecompileAndRelocateSuspendedGenerators( + const List<Handle<JSGeneratorObject> > &generators) { + for (int i = 0; i < generators.length(); i++) { + Handle<JSFunction> fun(generators[i]->function()); + + MaybeRecompileFunctionForDebugging(fun); + + int code_offset = generators[i]->continuation(); + int pc_offset = ComputePcOffsetFromCodeOffset(fun->code(), code_offset); + generators[i]->set_continuation(pc_offset); + } +} + + void Debug::PrepareForBreakPoints() { // If preparing for the first break point make sure to deoptimize all // functions as debugging does not work with optimized code. @@ -2018,6 +2085,21 @@ void Debug::PrepareForBreakPoints() { // is used both in GC and non-GC code. List<Handle<JSFunction> > active_functions(100); + // A list of all suspended generators. + List<Handle<JSGeneratorObject> > suspended_generators; + + // A list of all generator functions. We need to recompile all functions, + // but we don't know until after visiting the whole heap which generator + // functions have suspended activations and which do not. As in the case of + // functions with activations on the stack, we need to be careful with + // generator functions with suspended activations because although they + // should be recompiled, recompilation can fail, and we need to avoid + // leaving the heap in an inconsistent state. + // + // We could perhaps avoid this list and instead re-use the GC metadata + // links. + List<Handle<JSFunction> > generator_functions; + { // We are going to iterate heap to find all functions without // debug break slots. @@ -2055,6 +2137,11 @@ void Debug::PrepareForBreakPoints() { if (function->IsBuiltin()) continue; if (shared->code()->gc_metadata() == active_code_marker) continue; + if (shared->is_generator()) { + generator_functions.Add(Handle<JSFunction>(function, isolate_)); + continue; + } + Code::Kind kind = function->code()->kind(); if (kind == Code::FUNCTION && !function->code()->has_debug_break_slots()) { @@ -2074,6 +2161,24 @@ void Debug::PrepareForBreakPoints() { function->shared()->set_code(*lazy_compile); } } + } else if (obj->IsJSGeneratorObject()) { + JSGeneratorObject* gen = JSGeneratorObject::cast(obj); + if (!gen->is_suspended()) continue; + + JSFunction* fun = gen->function(); + ASSERT_EQ(fun->code()->kind(), Code::FUNCTION); + if (fun->code()->has_debug_break_slots()) continue; + + int pc_offset = gen->continuation(); + ASSERT_LT(0, pc_offset); + + int code_offset = + ComputeCodeOffsetFromPcOffset(fun->code(), pc_offset); + + // This will be fixed after we recompile the functions. + gen->set_continuation(code_offset); + + suspended_generators.Add(Handle<JSGeneratorObject>(gen, isolate_)); } } @@ -2084,41 +2189,35 @@ void Debug::PrepareForBreakPoints() { } } + // Recompile generator functions that have suspended activations, and + // relocate those activations. + RecompileAndRelocateSuspendedGenerators(suspended_generators); + + // Mark generator functions that didn't have suspended activations for lazy + // recompilation. Note that this set does not include any active functions. 
+ for (int i = 0; i < generator_functions.length(); i++) { + Handle<JSFunction> &function = generator_functions[i]; + if (function->code()->kind() != Code::FUNCTION) continue; + if (function->code()->has_debug_break_slots()) continue; + function->set_code(*lazy_compile); + function->shared()->set_code(*lazy_compile); + } + // Now recompile all functions with activation frames and and - // patch the return address to run in the new compiled code. + // patch the return address to run in the new compiled code. It could be + // that some active functions were recompiled already by the suspended + // generator recompilation pass above; a generator with suspended + // activations could also have active activations. That's fine. for (int i = 0; i < active_functions.length(); i++) { Handle<JSFunction> function = active_functions[i]; Handle<SharedFunctionInfo> shared(function->shared()); - if (function->code()->kind() == Code::FUNCTION && - function->code()->has_debug_break_slots()) { - // Nothing to do. Function code already had debug break slots. - continue; - } - // If recompilation is not possible just skip it. - if (shared->is_toplevel() || - !shared->allows_lazy_compilation() || - shared->code()->kind() == Code::BUILTIN) { - continue; - } - - // Make sure that the shared full code is compiled with debug - // break slots. - if (!shared->code()->has_debug_break_slots()) { - // Try to compile the full code with debug break slots. If it - // fails just keep the current code. - bool prev_force_debugger_active = - isolate_->debugger()->force_debugger_active(); - isolate_->debugger()->set_force_debugger_active(true); - Handle<Code> code = Compiler::GetCodeForDebugging(function); - function->ReplaceCode(*code); - isolate_->debugger()->set_force_debugger_active( - prev_force_debugger_active); - } + if (shared->is_toplevel()) continue; + if (!shared->allows_lazy_compilation()) continue; + if (shared->code()->kind() == Code::BUILTIN) continue; - // Keep function code in sync with shared function info. - function->set_code(shared->code()); + MaybeRecompileFunctionForDebugging(function); } RedirectActivationsToRecompiledCodeOnThread(isolate_, @@ -2227,10 +2326,10 @@ Object* Debug::FindSharedFunctionInfoInScript(Handle<Script> script, // will compile all inner functions that cannot be compiled without a // context, because Compiler::BuildFunctionInfo checks whether the // debugger is active. - Handle<Code> result = target_function.is_null() + MaybeHandle<Code> maybe_result = target_function.is_null() ? Compiler::GetUnoptimizedCode(target) : Compiler::GetUnoptimizedCode(target_function); - if (result.is_null()) return isolate_->heap()->undefined_value(); + if (maybe_result.is_null()) return isolate_->heap()->undefined_value(); } } // End while loop. @@ -2463,14 +2562,14 @@ void Debug::ClearMirrorCache() { // Clear the mirror cache. 
Handle<String> function_name = isolate_->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("ClearMirrorCache")); - Handle<Object> fun( - isolate_->global_object()->GetPropertyNoExceptionThrown(*function_name), - isolate_); + Handle<Object> fun = Object::GetProperty( + isolate_->global_object(), function_name).ToHandleChecked(); ASSERT(fun->IsJSFunction()); - bool caught_exception; - Execution::TryCall(Handle<JSFunction>::cast(fun), + Execution::TryCall( + Handle<JSFunction>::cast(fun), Handle<JSObject>(Debug::debug_context()->global_object()), - 0, NULL, &caught_exception); + 0, + NULL); } @@ -2591,113 +2690,82 @@ Debugger::Debugger(Isolate* isolate) Debugger::~Debugger() {} -Handle<Object> Debugger::MakeJSObject(Vector<const char> constructor_name, - int argc, - Handle<Object> argv[], - bool* caught_exception) { +MaybeHandle<Object> Debugger::MakeJSObject( + Vector<const char> constructor_name, + int argc, + Handle<Object> argv[]) { ASSERT(isolate_->context() == *isolate_->debug()->debug_context()); // Create the execution state object. Handle<String> constructor_str = isolate_->factory()->InternalizeUtf8String(constructor_name); ASSERT(!constructor_str.is_null()); - Handle<Object> constructor( - isolate_->global_object()->GetPropertyNoExceptionThrown(*constructor_str), - isolate_); + Handle<Object> constructor = Object::GetProperty( + isolate_->global_object(), constructor_str).ToHandleChecked(); ASSERT(constructor->IsJSFunction()); - if (!constructor->IsJSFunction()) { - *caught_exception = true; - return isolate_->factory()->undefined_value(); - } - Handle<Object> js_object = Execution::TryCall( + if (!constructor->IsJSFunction()) return MaybeHandle<Object>(); + return Execution::TryCall( Handle<JSFunction>::cast(constructor), Handle<JSObject>(isolate_->debug()->debug_context()->global_object()), argc, - argv, - caught_exception); - return js_object; + argv); } -Handle<Object> Debugger::MakeExecutionState(bool* caught_exception) { +MaybeHandle<Object> Debugger::MakeExecutionState() { // Create the execution state object. Handle<Object> break_id = isolate_->factory()->NewNumberFromInt( isolate_->debug()->break_id()); Handle<Object> argv[] = { break_id }; - return MakeJSObject(CStrVector("MakeExecutionState"), - ARRAY_SIZE(argv), - argv, - caught_exception); + return MakeJSObject(CStrVector("MakeExecutionState"), ARRAY_SIZE(argv), argv); } -Handle<Object> Debugger::MakeBreakEvent(Handle<Object> exec_state, - Handle<Object> break_points_hit, - bool* caught_exception) { +MaybeHandle<Object> Debugger::MakeBreakEvent(Handle<Object> break_points_hit) { + Handle<Object> exec_state; + if (!MakeExecutionState().ToHandle(&exec_state)) return MaybeHandle<Object>(); // Create the new break event object. Handle<Object> argv[] = { exec_state, break_points_hit }; - return MakeJSObject(CStrVector("MakeBreakEvent"), - ARRAY_SIZE(argv), - argv, - caught_exception); + return MakeJSObject(CStrVector("MakeBreakEvent"), ARRAY_SIZE(argv), argv); } -Handle<Object> Debugger::MakeExceptionEvent(Handle<Object> exec_state, - Handle<Object> exception, - bool uncaught, - bool* caught_exception) { - Factory* factory = isolate_->factory(); +MaybeHandle<Object> Debugger::MakeExceptionEvent(Handle<Object> exception, + bool uncaught, + Handle<Object> promise) { + Handle<Object> exec_state; + if (!MakeExecutionState().ToHandle(&exec_state)) return MaybeHandle<Object>(); // Create the new exception event object. 
Handle<Object> argv[] = { exec_state, exception, - factory->ToBoolean(uncaught) }; - return MakeJSObject(CStrVector("MakeExceptionEvent"), - ARRAY_SIZE(argv), - argv, - caught_exception); + isolate_->factory()->ToBoolean(uncaught), + promise }; + return MakeJSObject(CStrVector("MakeExceptionEvent"), ARRAY_SIZE(argv), argv); } -Handle<Object> Debugger::MakeNewFunctionEvent(Handle<Object> function, - bool* caught_exception) { - // Create the new function event object. - Handle<Object> argv[] = { function }; - return MakeJSObject(CStrVector("MakeNewFunctionEvent"), - ARRAY_SIZE(argv), - argv, - caught_exception); -} - - -Handle<Object> Debugger::MakeCompileEvent(Handle<Script> script, - bool before, - bool* caught_exception) { - Factory* factory = isolate_->factory(); +MaybeHandle<Object> Debugger::MakeCompileEvent(Handle<Script> script, + bool before) { + Handle<Object> exec_state; + if (!MakeExecutionState().ToHandle(&exec_state)) return MaybeHandle<Object>(); // Create the compile event object. - Handle<Object> exec_state = MakeExecutionState(caught_exception); - Handle<Object> script_wrapper = GetScriptWrapper(script); + Handle<Object> script_wrapper = Script::GetWrapper(script); Handle<Object> argv[] = { exec_state, script_wrapper, - factory->ToBoolean(before) }; - return MakeJSObject(CStrVector("MakeCompileEvent"), - ARRAY_SIZE(argv), - argv, - caught_exception); + isolate_->factory()->ToBoolean(before) }; + return MakeJSObject(CStrVector("MakeCompileEvent"), ARRAY_SIZE(argv), argv); } -Handle<Object> Debugger::MakeScriptCollectedEvent(int id, - bool* caught_exception) { +MaybeHandle<Object> Debugger::MakeScriptCollectedEvent(int id) { + Handle<Object> exec_state; + if (!MakeExecutionState().ToHandle(&exec_state)) return MaybeHandle<Object>(); // Create the script collected event object. - Handle<Object> exec_state = MakeExecutionState(caught_exception); Handle<Object> id_object = Handle<Smi>(Smi::FromInt(id), isolate_); Handle<Object> argv[] = { exec_state, id_object }; - return MakeJSObject(CStrVector("MakeScriptCollectedEvent"), - ARRAY_SIZE(argv), - argv, - caught_exception); + return MakeJSObject( + CStrVector("MakeScriptCollectedEvent"), ARRAY_SIZE(argv), argv); } @@ -2709,6 +2777,9 @@ void Debugger::OnException(Handle<Object> exception, bool uncaught) { if (debug->InDebugger()) return; if (!Debugger::EventActive(v8::Exception)) return; + Handle<Object> promise = debug->GetPromiseForUncaughtException(); + uncaught |= !promise->IsUndefined(); + // Bail out if exception breaks are not active if (uncaught) { // Uncaught exceptions are reported by either flags. @@ -2725,16 +2796,12 @@ void Debugger::OnException(Handle<Object> exception, bool uncaught) { // Clear all current stepping setup. debug->ClearStepping(); + // Create the event data object. - bool caught_exception = false; - Handle<Object> exec_state = MakeExecutionState(&caught_exception); Handle<Object> event_data; - if (!caught_exception) { - event_data = MakeExceptionEvent(exec_state, exception, uncaught, - &caught_exception); - } // Bail out and don't call debugger if exception. - if (caught_exception) { + if (!MakeExceptionEvent( + exception, uncaught, promise).ToHandle(&event_data)) { return; } @@ -2758,17 +2825,9 @@ void Debugger::OnDebugBreak(Handle<Object> break_points_hit, ASSERT(isolate_->context() == *isolate_->debug()->debug_context()); // Create the event data object. 
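In OnException above, an exception thrown while resolving a promise with no reject handler in sight is folded into the "uncaught" case before the break-on-exception flags are consulted. The filtering boils down to a small predicate; a stand-alone sketch of that decision, with flag names mirroring the members but the function itself purely illustrative:

#include <cassert>

// Should the debugger report this exception at all?
bool ShouldReportException(bool uncaught, bool has_pending_promise,
                           bool break_on_uncaught, bool break_on_exception) {
  if (has_pending_promise) uncaught = true;  // promise without reject handler
  if (uncaught) return break_on_uncaught || break_on_exception;
  return break_on_exception;                 // caught: only if breaking on all
}

int main() {
  assert(ShouldReportException(false, true, true, false));    // promise case
  assert(!ShouldReportException(false, false, true, false));  // caught, ignored
  assert(ShouldReportException(false, false, false, true));   // break on all
}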
- bool caught_exception = false; - Handle<Object> exec_state = MakeExecutionState(&caught_exception); Handle<Object> event_data; - if (!caught_exception) { - event_data = MakeBreakEvent(exec_state, break_points_hit, - &caught_exception); - } // Bail out and don't call debugger if exception. - if (caught_exception) { - return; - } + if (!MakeBreakEvent(break_points_hit).ToHandle(&event_data)) return; // Process debug event. ProcessDebugEvent(v8::Break, @@ -2790,12 +2849,9 @@ void Debugger::OnBeforeCompile(Handle<Script> script) { if (debugger.FailedToEnter()) return; // Create the event data object. - bool caught_exception = false; - Handle<Object> event_data = MakeCompileEvent(script, true, &caught_exception); + Handle<Object> event_data; // Bail out and don't call debugger if exception. - if (caught_exception) { - return; - } + if (!MakeCompileEvent(script, true).ToHandle(&event_data)) return; // Process debug event. ProcessDebugEvent(v8::BeforeCompile, @@ -2833,11 +2889,10 @@ void Debugger::OnAfterCompile(Handle<Script> script, Handle<String> update_script_break_points_string = isolate_->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("UpdateScriptBreakPoints")); + Handle<GlobalObject> debug_global(debug->debug_context()->global_object()); Handle<Object> update_script_break_points = - Handle<Object>( - debug->debug_context()->global_object()->GetPropertyNoExceptionThrown( - *update_script_break_points_string), - isolate_); + Object::GetProperty( + debug_global, update_script_break_points_string).ToHandleChecked(); if (!update_script_break_points->IsJSFunction()) { return; } @@ -2845,17 +2900,14 @@ void Debugger::OnAfterCompile(Handle<Script> script, // Wrap the script object in a proper JS object before passing it // to JavaScript. - Handle<JSValue> wrapper = GetScriptWrapper(script); + Handle<Object> wrapper = Script::GetWrapper(script); // Call UpdateScriptBreakPoints expect no exceptions. - bool caught_exception; Handle<Object> argv[] = { wrapper }; - Execution::TryCall(Handle<JSFunction>::cast(update_script_break_points), - isolate_->js_builtins_object(), - ARRAY_SIZE(argv), - argv, - &caught_exception); - if (caught_exception) { + if (Execution::TryCall(Handle<JSFunction>::cast(update_script_break_points), + isolate_->js_builtins_object(), + ARRAY_SIZE(argv), + argv).is_null()) { return; } // Bail out based on state or if there is no listener for this event @@ -2863,17 +2915,12 @@ void Debugger::OnAfterCompile(Handle<Script> script, if (!Debugger::EventActive(v8::AfterCompile)) return; // Create the compile state object. - Handle<Object> event_data = MakeCompileEvent(script, - false, - &caught_exception); + Handle<Object> event_data; // Bail out and don't call debugger if exception. - if (caught_exception) { - return; - } + if (!MakeCompileEvent(script, false).ToHandle(&event_data)) return; + // Process debug event. - ProcessDebugEvent(v8::AfterCompile, - Handle<JSObject>::cast(event_data), - true); + ProcessDebugEvent(v8::AfterCompile, Handle<JSObject>::cast(event_data), true); } @@ -2890,13 +2937,9 @@ void Debugger::OnScriptCollected(int id) { if (debugger.FailedToEnter()) return; // Create the script collected state object. - bool caught_exception = false; - Handle<Object> event_data = MakeScriptCollectedEvent(id, - &caught_exception); + Handle<Object> event_data; // Bail out and don't call debugger if exception. - if (caught_exception) { - return; - } + if (!MakeScriptCollectedEvent(id).ToHandle(&event_data)) return; // Process debug event. 
ProcessDebugEvent(v8::ScriptCollected, @@ -2916,11 +2959,10 @@ void Debugger::ProcessDebugEvent(v8::DebugEvent event, } // Create the execution state. - bool caught_exception = false; - Handle<Object> exec_state = MakeExecutionState(&caught_exception); - if (caught_exception) { - return; - } + Handle<Object> exec_state; + // Bail out and don't call debugger if exception. + if (!MakeExecutionState().ToHandle(&exec_state)) return; + // First notify the message handler if any. if (message_handler_ != NULL) { NotifyMessageHandler(event, @@ -2991,12 +3033,10 @@ void Debugger::CallJSEventCallback(v8::DebugEvent event, exec_state, event_data, event_listener_data_ }; - bool caught_exception; Execution::TryCall(fun, isolate_->global_object(), ARRAY_SIZE(argv), - argv, - &caught_exception); + argv); // Silently ignore exceptions from debug event listeners. } @@ -3248,7 +3288,8 @@ void Debugger::SetMessageHandler(v8::Debug::MessageHandler2 handler) { void Debugger::ListenersChanged() { - if (IsDebuggerActive()) { + bool active = IsDebuggerActive(); + if (active) { // Disable the compilation cache when the debugger is active. isolate_->compilation_cache()->Disable(); debugger_unload_pending_ = false; @@ -3350,9 +3391,8 @@ bool Debugger::IsDebuggerActive() { } -Handle<Object> Debugger::Call(Handle<JSFunction> fun, - Handle<Object> data, - bool* pending_exception) { +MaybeHandle<Object> Debugger::Call(Handle<JSFunction> fun, + Handle<Object> data) { // When calling functions in the debugger prevent it from beeing unloaded. Debugger::never_unload_debugger_ = true; @@ -3363,22 +3403,19 @@ Handle<Object> Debugger::Call(Handle<JSFunction> fun, } // Create the execution state. - bool caught_exception = false; - Handle<Object> exec_state = MakeExecutionState(&caught_exception); - if (caught_exception) { + Handle<Object> exec_state; + if (!MakeExecutionState().ToHandle(&exec_state)) { return isolate_->factory()->undefined_value(); } Handle<Object> argv[] = { exec_state, data }; - Handle<Object> result = Execution::Call( + return Execution::Call( isolate_, fun, Handle<Object>(isolate_->debug()->debug_context_->global_proxy(), isolate_), ARRAY_SIZE(argv), - argv, - pending_exception); - return result; + argv); } @@ -3397,7 +3434,7 @@ bool Debugger::StartAgent(const char* name, int port, // Once become suspended, V8 will stay so indefinitely long, until remote // debugger connects and issues "continue" command. Debugger::message_handler_ = StubMessageHandler2; - v8::Debug::DebugBreak(); + v8::Debug::DebugBreak(reinterpret_cast<v8::Isolate*>(isolate_)); } if (agent_ == NULL) { @@ -3597,20 +3634,21 @@ v8::Handle<v8::Object> MessageImpl::GetEventData() const { v8::Handle<v8::String> MessageImpl::GetJSON() const { - v8::EscapableHandleScope scope( - reinterpret_cast<v8::Isolate*>(event_data_->GetIsolate())); + Isolate* isolate = event_data_->GetIsolate(); + v8::EscapableHandleScope scope(reinterpret_cast<v8::Isolate*>(isolate)); if (IsEvent()) { // Call toJSONProtocol on the debug event object. 
- Handle<Object> fun = GetProperty(event_data_, "toJSONProtocol"); + Handle<Object> fun = Object::GetProperty( + isolate, event_data_, "toJSONProtocol").ToHandleChecked(); if (!fun->IsJSFunction()) { return v8::Handle<v8::String>(); } - bool caught_exception; - Handle<Object> json = Execution::TryCall(Handle<JSFunction>::cast(fun), - event_data_, - 0, NULL, &caught_exception); - if (caught_exception || !json->IsString()) { + + MaybeHandle<Object> maybe_json = + Execution::TryCall(Handle<JSFunction>::cast(fun), event_data_, 0, NULL); + Handle<Object> json; + if (!maybe_json.ToHandle(&json) || !json->IsString()) { return v8::Handle<v8::String>(); } return scope.Escape(v8::Utils::ToLocal(Handle<String>::cast(json))); @@ -3815,6 +3853,4 @@ void MessageDispatchHelperThread::Run() { } } -#endif // ENABLE_DEBUGGER_SUPPORT - } } // namespace v8::internal diff --git a/deps/v8/src/debug.h b/deps/v8/src/debug.h index 564f9e885..457a5fad8 100644 --- a/deps/v8/src/debug.h +++ b/deps/v8/src/debug.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DEBUG_H_ #define V8_DEBUG_H_ @@ -41,7 +18,6 @@ #include "string-stream.h" #include "v8threads.h" -#ifdef ENABLE_DEBUGGER_SUPPORT #include "../include/v8-debug.h" namespace v8 { @@ -175,7 +151,9 @@ class BreakLocationIterator { class ScriptCache : private HashMap { public: explicit ScriptCache(Isolate* isolate) - : HashMap(ScriptMatch), isolate_(isolate), collected_scripts_(10) {} + : HashMap(HashMap::PointersMatch), + isolate_(isolate), + collected_scripts_(10) {} virtual ~ScriptCache() { Clear(); } // Add script to the cache. @@ -193,9 +171,6 @@ class ScriptCache : private HashMap { return ComputeIntegerHash(key, v8::internal::kZeroHashSeed); } - // Scripts match if their keys (script id) match. - static bool ScriptMatch(void* key1, void* key2) { return key1 == key2; } - // Clear the cache releasing all the weak handles. 
void Clear(); @@ -237,13 +212,11 @@ class DebugInfoListNode { // DebugInfo. class Debug { public: - void SetUp(bool create_heap_objects); bool Load(); void Unload(); bool IsLoaded() { return !debug_context_.is_null(); } bool InDebugger() { return thread_local_.debugger_entry_ != NULL; } void PreemptionWhileInDebugger(); - void Iterate(ObjectVisitor* v); Object* Break(Arguments args); void SetBreakPoint(Handle<JSFunction> function, @@ -260,6 +233,12 @@ class Debug { void FloodHandlerWithOneShot(); void ChangeBreakOnException(ExceptionBreakType type, bool enable); bool IsBreakOnException(ExceptionBreakType type); + + void PromiseHandlePrologue(Handle<JSFunction> promise_getter); + void PromiseHandleEpilogue(); + // Returns a promise if it does not have a reject handler. + Handle<Object> GetPromiseForUncaughtException(); + void PrepareStep(StepAction step_action, int step_count, StackFrame::Id frame_id); @@ -366,8 +345,6 @@ class Debug { enum AddressId { k_after_break_target_address, - k_debug_break_return_address, - k_debug_break_slot_address, k_restarter_frame_function_pointer }; @@ -385,18 +362,6 @@ class Debug { return ®isters_[r]; } - // Access to the debug break on return code. - Code* debug_break_return() { return debug_break_return_; } - Code** debug_break_return_address() { - return &debug_break_return_; - } - - // Access to the debug break in debug break slot code. - Code* debug_break_slot() { return debug_break_slot_; } - Code** debug_break_slot_address() { - return &debug_break_slot_; - } - static const int kEstimatedNofDebugInfoEntries = 16; static const int kEstimatedNofBreakPointsInFunction = 16; @@ -431,6 +396,7 @@ class Debug { // Code generator routines. static void GenerateSlot(MacroAssembler* masm); + static void GenerateCallICStubDebugBreak(MacroAssembler* masm); static void GenerateLoadICDebugBreak(MacroAssembler* masm); static void GenerateStoreICDebugBreak(MacroAssembler* masm); static void GenerateKeyedLoadICDebugBreak(MacroAssembler* masm); @@ -438,7 +404,6 @@ class Debug { static void GenerateCompareNilICDebugBreak(MacroAssembler* masm); static void GenerateReturnDebugBreak(MacroAssembler* masm); static void GenerateCallFunctionStubDebugBreak(MacroAssembler* masm); - static void GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm); static void GenerateCallConstructStubDebugBreak(MacroAssembler* masm); static void GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm); static void GenerateSlotDebugBreak(MacroAssembler* masm); @@ -450,9 +415,6 @@ class Debug { // called, it only gets returned to. static void GenerateFrameDropperLiveEdit(MacroAssembler* masm); - // Called from stub-cache.cc. - static void GenerateCallICDebugBreak(MacroAssembler* masm); - // Describes how exactly a frame has been dropped from stack. enum FrameDropMode { // No frame has been dropped. @@ -549,6 +511,10 @@ class Debug { Handle<Object> CheckBreakPoints(Handle<Object> break_point); bool CheckBreakPoint(Handle<Object> break_point_object); + void MaybeRecompileFunctionForDebugging(Handle<JSFunction> function); + void RecompileAndRelocateSuspendedGenerators( + const List<Handle<JSGeneratorObject> > &suspended_generators); + // Global handle to debug context where all the debugger JavaScript code is // loaded. Handle<Context> debug_context_; @@ -566,6 +532,14 @@ class Debug { bool break_on_exception_; bool break_on_uncaught_exception_; + // When a promise is being resolved, we may want to trigger a debug event for + // the case we catch a throw. 
For this purpose we remember the try-catch + // handler address that would catch the exception. We also hold onto a + // closure that returns a promise if the exception is considered uncaught. + // Due to the possibility of reentry we use a list to form a stack. + List<StackHandler*> promise_catch_handlers_; + List<Handle<JSFunction> > promise_getters_; + // Per-thread data. class ThreadLocal { public: @@ -624,12 +598,6 @@ class Debug { ThreadLocal thread_local_; void ThreadInit(); - // Code to call for handling debug break on return. - Code* debug_break_return_; - - // Code to call for handling debug break in debug break slots. - Code* debug_break_slot_; - Isolate* isolate_; friend class Isolate; @@ -638,7 +606,7 @@ class Debug { }; -DECLARE_RUNTIME_FUNCTION(Object*, Debug_Break); +DECLARE_RUNTIME_FUNCTION(Debug_Break); // Message delivered to the message handler callback. This is either a debugger @@ -786,25 +754,21 @@ class Debugger { void DebugRequest(const uint16_t* json_request, int length); - Handle<Object> MakeJSObject(Vector<const char> constructor_name, - int argc, - Handle<Object> argv[], - bool* caught_exception); - Handle<Object> MakeExecutionState(bool* caught_exception); - Handle<Object> MakeBreakEvent(Handle<Object> exec_state, - Handle<Object> break_points_hit, - bool* caught_exception); - Handle<Object> MakeExceptionEvent(Handle<Object> exec_state, - Handle<Object> exception, - bool uncaught, - bool* caught_exception); - Handle<Object> MakeNewFunctionEvent(Handle<Object> func, - bool* caught_exception); - Handle<Object> MakeCompileEvent(Handle<Script> script, - bool before, - bool* caught_exception); - Handle<Object> MakeScriptCollectedEvent(int id, - bool* caught_exception); + MUST_USE_RESULT MaybeHandle<Object> MakeJSObject( + Vector<const char> constructor_name, + int argc, + Handle<Object> argv[]); + MUST_USE_RESULT MaybeHandle<Object> MakeExecutionState(); + MUST_USE_RESULT MaybeHandle<Object> MakeBreakEvent( + Handle<Object> break_points_hit); + MUST_USE_RESULT MaybeHandle<Object> MakeExceptionEvent( + Handle<Object> exception, + bool uncaught, + Handle<Object> promise); + MUST_USE_RESULT MaybeHandle<Object> MakeCompileEvent( + Handle<Script> script, bool before); + MUST_USE_RESULT MaybeHandle<Object> MakeScriptCollectedEvent(int id); + void OnDebugBreak(Handle<Object> break_points_hit, bool auto_continue); void OnException(Handle<Object> exception, bool uncaught); void OnBeforeCompile(Handle<Script> script); @@ -844,9 +808,8 @@ class Debugger { // Enqueue a debugger command to the command queue for event listeners. void EnqueueDebugCommand(v8::Debug::ClientData* client_data = NULL); - Handle<Object> Call(Handle<JSFunction> fun, - Handle<Object> data, - bool* pending_exception); + MUST_USE_RESULT MaybeHandle<Object> Call(Handle<JSFunction> fun, + Handle<Object> data); // Start the debugger agent listening on the provided port. 
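The comment above explains why promise_getters_ and promise_catch_handlers_ are kept as two parallel stacks: promise resolution can re-enter, so each prologue pushes one entry on both lists and the matching epilogue pops them, keeping entry i of both lists describing the same resolution. A compact stand-alone model of that bookkeeping, with std::vector playing the role of V8's List and simplified element types:

#include <cassert>
#include <vector>

struct StackHandler {};          // stand-ins for the real V8 types
struct PromiseGetter {};

class PromiseTracker {
 public:
  // Called when a promise starts resolving (PromiseHandlePrologue).
  void Push(PromiseGetter* getter, StackHandler* handler) {
    getters_.push_back(getter);
    handlers_.push_back(handler);
  }
  // Called when resolution finishes (PromiseHandleEpilogue).
  void Pop() {
    if (handlers_.empty()) return;
    handlers_.pop_back();
    getters_.pop_back();
  }
  bool empty() const { return getters_.empty(); }
  // Entries for the innermost resolution currently in progress.
  PromiseGetter* top_getter() const { return getters_.back(); }
  StackHandler* top_handler() const { return handlers_.back(); }

 private:
  std::vector<PromiseGetter*> getters_;   // parallel: entry i of both vectors
  std::vector<StackHandler*> handlers_;   // belongs to the same resolution
};

int main() {
  PromiseTracker tracker;
  PromiseGetter outer, inner;
  StackHandler h1, h2;
  tracker.Push(&outer, &h1);
  tracker.Push(&inner, &h2);     // re-entrant resolution nests
  assert(tracker.top_getter() == &inner);
  tracker.Pop();
  assert(tracker.top_getter() == &outer);
  tracker.Pop();
  assert(tracker.empty());
}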
bool StartAgent(const char* name, int port, @@ -1015,10 +978,6 @@ class Debug_Address { return Debug_Address(Debug::k_after_break_target_address); } - static Debug_Address DebugBreakReturn() { - return Debug_Address(Debug::k_debug_break_return_address); - } - static Debug_Address RestarterFrameFunctionPointer() { return Debug_Address(Debug::k_restarter_frame_function_pointer); } @@ -1028,10 +987,6 @@ class Debug_Address { switch (id_) { case Debug::k_after_break_target_address: return reinterpret_cast<Address>(debug->after_break_target_address()); - case Debug::k_debug_break_return_address: - return reinterpret_cast<Address>(debug->debug_break_return_address()); - case Debug::k_debug_break_slot_address: - return reinterpret_cast<Address>(debug->debug_break_slot_address()); case Debug::k_restarter_frame_function_pointer: return reinterpret_cast<Address>( debug->restarter_frame_function_pointer_address()); @@ -1070,6 +1025,4 @@ class MessageDispatchHelperThread: public Thread { } } // namespace v8::internal -#endif // ENABLE_DEBUGGER_SUPPORT - #endif // V8_DEBUG_H_ diff --git a/deps/v8/src/deoptimizer.cc b/deps/v8/src/deoptimizer.cc index 4d5e60573..e8cf5993e 100644 --- a/deps/v8/src/deoptimizer.cc +++ b/deps/v8/src/deoptimizer.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -56,9 +33,7 @@ static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) { DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator) : allocator_(allocator), -#ifdef ENABLE_DEBUGGER_SUPPORT deoptimized_frame_info_(NULL), -#endif current_(NULL) { for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { deopt_entry_code_entries_[i] = -1; @@ -75,13 +50,11 @@ DeoptimizerData::~DeoptimizerData() { } -#ifdef ENABLE_DEBUGGER_SUPPORT void DeoptimizerData::Iterate(ObjectVisitor* v) { if (deoptimized_frame_info_ != NULL) { deoptimized_frame_info_->Iterate(v); } } -#endif Code* Deoptimizer::FindDeoptimizingCode(Address addr) { @@ -91,7 +64,7 @@ Code* Deoptimizer::FindDeoptimizingCode(Address addr) { Object* element = native_context->DeoptimizedCodeListHead(); while (!element->IsUndefined()) { Code* code = Code::cast(element); - ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); + CHECK(code->kind() == Code::OPTIMIZED_FUNCTION); if (code->contains(addr)) return code; element = code->next_code_link(); } @@ -115,7 +88,7 @@ Deoptimizer* Deoptimizer::New(JSFunction* function, from, fp_to_sp_delta, NULL); - ASSERT(isolate->deoptimizer_data()->current_ == NULL); + CHECK(isolate->deoptimizer_data()->current_ == NULL); isolate->deoptimizer_data()->current_ = deoptimizer; return deoptimizer; } @@ -137,7 +110,7 @@ size_t Deoptimizer::GetMaxDeoptTableSize() { Deoptimizer* Deoptimizer::Grab(Isolate* isolate) { Deoptimizer* result = isolate->deoptimizer_data()->current_; - ASSERT(result != NULL); + CHECK_NE(result, NULL); result->DeleteFrameDescriptions(); isolate->deoptimizer_data()->current_ = NULL; return result; @@ -160,13 +133,12 @@ int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) { } -#ifdef ENABLE_DEBUGGER_SUPPORT DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame( JavaScriptFrame* frame, int jsframe_index, Isolate* isolate) { - ASSERT(frame->is_optimized()); - ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL); + CHECK(frame->is_optimized()); + CHECK(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL); // Get the function and code from the frame. JSFunction* function = frame->function(); @@ -176,7 +148,7 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame( // return address must be at a place in the code with deoptimization support. SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc()); int deoptimization_index = safepoint_entry.deoptimization_index(); - ASSERT(deoptimization_index != Safepoint::kNoDeoptimizationIndex); + CHECK_NE(deoptimization_index, Safepoint::kNoDeoptimizationIndex); // Always use the actual stack slots when calculating the fp to sp // delta adding two for the function and context. @@ -199,7 +171,7 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame( // Create the GC safe output frame information and register it for GC // handling. - ASSERT_LT(jsframe_index, deoptimizer->jsframe_count()); + CHECK_LT(jsframe_index, deoptimizer->jsframe_count()); // Convert JS frame index into frame index. 
int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index); @@ -251,11 +223,11 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame( void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info, Isolate* isolate) { - ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == info); + CHECK_EQ(isolate->deoptimizer_data()->deoptimized_frame_info_, info); delete info; isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL; } -#endif + void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm, int count, @@ -269,7 +241,7 @@ void Deoptimizer::VisitAllOptimizedFunctionsForContext( Context* context, OptimizedFunctionVisitor* visitor) { DisallowHeapAllocation no_allocation; - ASSERT(context->IsNativeContext()); + CHECK(context->IsNativeContext()); visitor->EnterContext(context); @@ -292,13 +264,13 @@ void Deoptimizer::VisitAllOptimizedFunctionsForContext( context->SetOptimizedFunctionsListHead(next); } // The visitor should not alter the link directly. - ASSERT(function->next_function_link() == next); + CHECK_EQ(function->next_function_link(), next); // Set the next function link to undefined to indicate it is no longer // in the optimized functions list. function->set_next_function_link(context->GetHeap()->undefined_value()); } else { // The visitor should not alter the link directly. - ASSERT(function->next_function_link() == next); + CHECK_EQ(function->next_function_link(), next); // preserve this element. prev = function; } @@ -400,7 +372,7 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) { Object* element = context->OptimizedCodeListHead(); while (!element->IsUndefined()) { Code* code = Code::cast(element); - ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); + CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION); Object* next = code->next_code_link(); if (code->marked_for_deoptimization()) { // Put the code into the list for later patching. @@ -488,7 +460,7 @@ void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) { } if (object->IsJSGlobalProxy()) { Object* proto = object->GetPrototype(); - ASSERT(proto->IsJSGlobalObject()); + CHECK(proto->IsJSGlobalObject()); Context* native_context = GlobalObject::cast(proto)->native_context(); MarkAllCodeForContext(native_context); DeoptimizeMarkedCodeForContext(native_context); @@ -504,7 +476,7 @@ void Deoptimizer::MarkAllCodeForContext(Context* context) { Object* element = context->OptimizedCodeListHead(); while (!element->IsUndefined()) { Code* code = Code::cast(element); - ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); + CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION); code->set_marked_for_deoptimization(true); element = code->next_code_link(); } @@ -539,7 +511,7 @@ bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type, ? 
FLAG_trace_stub_failures : FLAG_trace_deopt; } - UNREACHABLE(); + FATAL("Unsupported deopt type"); return false; } @@ -551,7 +523,7 @@ const char* Deoptimizer::MessageFor(BailoutType type) { case LAZY: return "lazy"; case DEBUGGER: return "debugger"; } - UNREACHABLE(); + FATAL("Unsupported deopt type"); return NULL; } @@ -641,7 +613,7 @@ Code* Deoptimizer::FindOptimizedCode(JSFunction* function, ASSERT(optimized_code->contains(from_)); return optimized_code; } - UNREACHABLE(); + FATAL("Could not find code for optimized function"); return NULL; } @@ -684,15 +656,15 @@ Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate, int id, BailoutType type, GetEntryMode mode) { - ASSERT(id >= 0); + CHECK_GE(id, 0); if (id >= kMaxNumberOfEntries) return NULL; if (mode == ENSURE_ENTRY_CODE) { EnsureCodeForDeoptimizationEntry(isolate, type, id); } else { - ASSERT(mode == CALCULATE_ENTRY_ADDRESS); + CHECK_EQ(mode, CALCULATE_ENTRY_ADDRESS); } DeoptimizerData* data = isolate->deoptimizer_data(); - ASSERT(type < kBailoutTypesWithCodeEntry); + CHECK_LT(type, kBailoutTypesWithCodeEntry); MemoryChunk* base = data->deopt_entry_code_[type]; return base->area_start() + (id * table_entry_size_); } @@ -855,7 +827,7 @@ void Deoptimizer::DoComputeOutputFrames() { case Translation::LITERAL: case Translation::ARGUMENTS_OBJECT: default: - UNREACHABLE(); + FATAL("Unsupported translation"); break; } } @@ -894,7 +866,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, } else { int closure_id = iterator->Next(); USE(closure_id); - ASSERT_EQ(Translation::kSelfLiteralId, closure_id); + CHECK_EQ(Translation::kSelfLiteralId, closure_id); function = function_; } unsigned height = iterator->Next(); @@ -919,8 +891,8 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, bool is_bottommost = (0 == frame_index); bool is_topmost = (output_count_ - 1 == frame_index); - ASSERT(frame_index >= 0 && frame_index < output_count_); - ASSERT(output_[frame_index] == NULL); + CHECK(frame_index >= 0 && frame_index < output_count_); + CHECK_EQ(output_[frame_index], NULL); output_[frame_index] = output_frame; // The top address for the bottommost output frame can be computed from @@ -1064,7 +1036,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, output_offset -= kPointerSize; DoTranslateCommand(iterator, frame_index, output_offset); } - ASSERT(0 == output_offset); + CHECK_EQ(0, output_offset); // Compute this frame's PC, state, and continuation. Code* non_optimized_code = function->shared()->code(); @@ -1101,7 +1073,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, } else if (bailout_type_ == SOFT) { continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized); } else { - ASSERT(bailout_type_ == EAGER); + CHECK_EQ(bailout_type_, EAGER); } output_frame->SetContinuation( reinterpret_cast<intptr_t>(continuation->entry())); @@ -1128,8 +1100,8 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator, output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR); // Arguments adaptor can not be topmost or bottommost. 
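In the same spirit, several UNREACHABLE() markers after switches over bailout and translation types become FATAL("...") with a message, so an unexpected enum value produces a descriptive abort in all build modes rather than only in debug builds. A small sketch of that shape; FATAL here is a simplified stand-in:

#include <cstdio>
#include <cstdlib>

#define FATAL(msg)                            \
  do {                                        \
    std::fprintf(stderr, "Fatal: %s\n", msg); \
    std::abort();                             \
  } while (false)

enum class BailoutType { kEager, kSoft, kLazy, kDebugger };

const char* MessageFor(BailoutType type) {
  switch (type) {
    case BailoutType::kEager:    return "eager";
    case BailoutType::kSoft:     return "soft";
    case BailoutType::kLazy:     return "lazy";
    case BailoutType::kDebugger: return "debugger";
  }
  FATAL("Unsupported deopt type");  // reached only if the enum grows
  return nullptr;
}

int main() { std::puts(MessageFor(BailoutType::kLazy)); }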
- ASSERT(frame_index > 0 && frame_index < output_count_ - 1); - ASSERT(output_[frame_index] == NULL); + CHECK(frame_index > 0 && frame_index < output_count_ - 1); + CHECK(output_[frame_index] == NULL); output_[frame_index] = output_frame; // The top address of the frame is computed from the previous @@ -1275,7 +1247,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator, // receiver parameter through the translation. It might be encoding // a captured object, patch the slot address for a captured object. if (i == 0 && deferred_objects_.length() > deferred_object_index) { - ASSERT(!deferred_objects_[deferred_object_index].is_arguments()); + CHECK(!deferred_objects_[deferred_object_index].is_arguments()); deferred_objects_[deferred_object_index].patch_slot_address(top_address); } } @@ -1386,7 +1358,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator, top_address + output_offset, output_offset, value); } - ASSERT(0 == output_offset); + CHECK_EQ(0, output_offset); intptr_t pc = reinterpret_cast<intptr_t>( construct_stub->instruction_start() + @@ -1432,8 +1404,8 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator, output_frame->SetFrameType(StackFrame::INTERNAL); // A frame for an accessor stub can not be the topmost or bottommost one. - ASSERT(frame_index > 0 && frame_index < output_count_ - 1); - ASSERT(output_[frame_index] == NULL); + CHECK(frame_index > 0 && frame_index < output_count_ - 1); + CHECK_EQ(output_[frame_index], NULL); output_[frame_index] = output_frame; // The top address of the frame is computed from the previous frame's top and @@ -1517,9 +1489,7 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator, } // Skip receiver. - Translation::Opcode opcode = - static_cast<Translation::Opcode>(iterator->Next()); - iterator->Skip(Translation::NumberOfOperandsFor(opcode)); + DoTranslateObjectAndSkip(iterator); if (is_setter_stub_frame) { // The implicit return value was part of the artificial setter stub @@ -1528,7 +1498,7 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator, DoTranslateCommand(iterator, frame_index, output_offset); } - ASSERT(0 == output_offset); + CHECK_EQ(output_offset, 0); Smi* offset = is_setter_stub_frame ? isolate_->heap()->setter_stub_deopt_pc_offset() : @@ -1578,8 +1548,8 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator, // reg = JSFunction context // - ASSERT(compiled_code_->is_crankshafted() && - compiled_code_->kind() != Code::OPTIMIZED_FUNCTION); + CHECK(compiled_code_->is_crankshafted() && + compiled_code_->kind() != Code::OPTIMIZED_FUNCTION); int major_key = compiled_code_->major_key(); CodeStubInterfaceDescriptor* descriptor = isolate_->code_stub_interface_descriptor(major_key); @@ -1588,7 +1558,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator, // and the standard stack frame slots. Include space for an argument // object to the callee and optionally the space to pass the argument // object to the stub failure handler. 
- ASSERT(descriptor->register_param_count_ >= 0); + CHECK_GE(descriptor->register_param_count_, 0); int height_in_bytes = kPointerSize * descriptor->register_param_count_ + sizeof(Arguments) + kPointerSize; int fixed_frame_size = StandardFrameConstants::kFixedFrameSize; @@ -1605,7 +1575,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator, FrameDescription* output_frame = new(output_frame_size) FrameDescription(output_frame_size, NULL); output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE); - ASSERT(frame_index == 0); + CHECK_EQ(frame_index, 0); output_[frame_index] = output_frame; // The top address for the output frame can be computed from the input @@ -1663,7 +1633,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator, output_frame->SetRegister(context_reg.code(), value); output_frame_offset -= kPointerSize; output_frame->SetFrameSlot(output_frame_offset, value); - ASSERT(reinterpret_cast<Object*>(value)->IsContext()); + CHECK(reinterpret_cast<Object*>(value)->IsContext()); if (trace_scope_ != NULL) { PrintF(trace_scope_->file(), " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" @@ -1741,10 +1711,10 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator, } } - ASSERT(0 == output_frame_offset); + CHECK_EQ(output_frame_offset, 0); if (!arg_count_known) { - ASSERT(arguments_length_offset >= 0); + CHECK_GE(arguments_length_offset, 0); // We know it's a smi because 1) the code stub guarantees the stack // parameter count is in smi range, and 2) the DoTranslateCommand in the // parameter loop above translated that to a tagged value. @@ -1780,8 +1750,8 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator, // Compute this frame's PC, state, and continuation. Code* trampoline = NULL; StubFunctionMode function_mode = descriptor->function_mode_; - StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline, - isolate_); + StubFailureTrampolineStub(isolate_, + function_mode).FindCodeInCache(&trampoline); ASSERT(trampoline != NULL); output_frame->SetPc(reinterpret_cast<intptr_t>( trampoline->instruction_start())); @@ -1828,7 +1798,7 @@ Handle<Object> Deoptimizer::MaterializeNextHeapObject() { Handle<JSObject> arguments = isolate_->factory()->NewArgumentsObject(function, length); Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length); - ASSERT(array->length() == length); + ASSERT_EQ(array->length(), length); arguments->set_elements(*array); materialized_objects_->Add(arguments); for (int i = 0; i < length; ++i) { @@ -1840,7 +1810,7 @@ Handle<Object> Deoptimizer::MaterializeNextHeapObject() { // We also need to make sure that the representation of all fields // in the given object are general enough to hold a tagged value. Handle<Map> map = Map::GeneralizeAllFieldRepresentations( - Handle<Map>::cast(MaterializeNextValue()), Representation::Tagged()); + Handle<Map>::cast(MaterializeNextValue())); switch (map->instance_type()) { case HEAP_NUMBER_TYPE: { // Reuse the HeapNumber value directly as it is already properly @@ -1894,7 +1864,7 @@ Handle<Object> Deoptimizer::MaterializeNextHeapObject() { default: PrintF(stderr, "[couldn't handle instance type %d]\n", map->instance_type()); - UNREACHABLE(); + FATAL("Unsupported instance type"); } } @@ -1988,7 +1958,9 @@ void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) { // materialize a new instance of the object if necessary. Store // the materialized object into the frame slot. 
Handle<Object> object = MaterializeNextHeapObject(); - Memory::Object_at(descriptor.slot_address()) = *object; + if (descriptor.slot_address() != NULL) { + Memory::Object_at(descriptor.slot_address()) = *object; + } if (trace_scope_ != NULL) { if (descriptor.is_arguments()) { PrintF(trace_scope_->file(), @@ -2007,8 +1979,8 @@ void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) { } } - ASSERT(materialization_object_index_ == materialized_objects_->length()); - ASSERT(materialization_value_index_ == materialized_values_->length()); + CHECK_EQ(materialization_object_index_, materialized_objects_->length()); + CHECK_EQ(materialization_value_index_, materialized_values_->length()); } if (prev_materialized_count_ > 0) { @@ -2017,14 +1989,13 @@ void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) { } -#ifdef ENABLE_DEBUGGER_SUPPORT void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame( Address parameters_top, uint32_t parameters_size, Address expressions_top, uint32_t expressions_size, DeoptimizedFrameInfo* info) { - ASSERT_EQ(DEBUGGER, bailout_type_); + CHECK_EQ(DEBUGGER, bailout_type_); Address parameters_bottom = parameters_top + parameters_size; Address expressions_bottom = expressions_top + expressions_size; for (int i = 0; i < deferred_heap_numbers_.length(); i++) { @@ -2070,7 +2041,6 @@ void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame( } } } -#endif static const char* TraceValueType(bool is_smi) { @@ -2082,6 +2052,73 @@ static const char* TraceValueType(bool is_smi) { } +void Deoptimizer::DoTranslateObjectAndSkip(TranslationIterator* iterator) { + Translation::Opcode opcode = + static_cast<Translation::Opcode>(iterator->Next()); + + switch (opcode) { + case Translation::BEGIN: + case Translation::JS_FRAME: + case Translation::ARGUMENTS_ADAPTOR_FRAME: + case Translation::CONSTRUCT_STUB_FRAME: + case Translation::GETTER_STUB_FRAME: + case Translation::SETTER_STUB_FRAME: + case Translation::COMPILED_STUB_FRAME: { + FATAL("Unexpected frame start translation opcode"); + return; + } + + case Translation::REGISTER: + case Translation::INT32_REGISTER: + case Translation::UINT32_REGISTER: + case Translation::DOUBLE_REGISTER: + case Translation::STACK_SLOT: + case Translation::INT32_STACK_SLOT: + case Translation::UINT32_STACK_SLOT: + case Translation::DOUBLE_STACK_SLOT: + case Translation::LITERAL: { + // The value is not part of any materialized object, so we can ignore it. + iterator->Skip(Translation::NumberOfOperandsFor(opcode)); + return; + } + + case Translation::DUPLICATED_OBJECT: { + int object_index = iterator->Next(); + if (trace_scope_ != NULL) { + PrintF(trace_scope_->file(), " skipping object "); + PrintF(trace_scope_->file(), + " ; duplicate of object #%d\n", object_index); + } + AddObjectDuplication(0, object_index); + return; + } + + case Translation::ARGUMENTS_OBJECT: + case Translation::CAPTURED_OBJECT: { + int length = iterator->Next(); + bool is_args = opcode == Translation::ARGUMENTS_OBJECT; + if (trace_scope_ != NULL) { + PrintF(trace_scope_->file(), " skipping object "); + PrintF(trace_scope_->file(), + " ; object (length = %d, is_args = %d)\n", length, is_args); + } + + AddObjectStart(0, length, is_args); + + // We save the object values on the side and materialize the actual + // object after the deoptimized frame is built. 
+ int object_index = deferred_objects_.length() - 1; + for (int i = 0; i < length; i++) { + DoTranslateObject(iterator, object_index, i); + } + return; + } + } + + FATAL("Unexpected translation opcode"); +} + + void Deoptimizer::DoTranslateObject(TranslationIterator* iterator, int object_index, int field_index) { @@ -2099,7 +2136,7 @@ void Deoptimizer::DoTranslateObject(TranslationIterator* iterator, case Translation::GETTER_STUB_FRAME: case Translation::SETTER_STUB_FRAME: case Translation::COMPILED_STUB_FRAME: - UNREACHABLE(); + FATAL("Unexpected frame start translation opcode"); return; case Translation::REGISTER: { @@ -2339,6 +2376,8 @@ void Deoptimizer::DoTranslateObject(TranslationIterator* iterator, return; } } + + FATAL("Unexpected translation opcode"); } @@ -2360,7 +2399,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator, case Translation::GETTER_STUB_FRAME: case Translation::SETTER_STUB_FRAME: case Translation::COMPILED_STUB_FRAME: - UNREACHABLE(); + FATAL("Unexpected translation opcode"); return; case Translation::REGISTER: { @@ -2632,13 +2671,11 @@ unsigned Deoptimizer::ComputeInputFrameSize() const { // function into account so we have to avoid double counting them. unsigned result = fixed_size + fp_to_sp_delta_ - StandardFrameConstants::kFixedFrameSizeFromFp; -#ifdef DEBUG if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) { unsigned stack_slots = compiled_code_->stack_slots(); unsigned outgoing_size = ComputeOutgoingArgumentSize(); - ASSERT(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size); + CHECK(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size); } -#endif return result; } @@ -2655,7 +2692,7 @@ unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const { // The incoming arguments is the values for formal parameters and // the receiver. Every slot contains a pointer. if (function->IsSmi()) { - ASSERT(Smi::cast(function) == Smi::FromInt(StackFrame::STUB)); + CHECK_EQ(Smi::cast(function), Smi::FromInt(StackFrame::STUB)); return 0; } unsigned arguments = function->shared()->formal_parameter_count() + 1; @@ -2720,13 +2757,13 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate, // cause us to emit relocation information for the external // references. This is fine because the deoptimizer's code section // isn't meant to be serialized at all. 
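The DoTranslateObjectAndSkip hunk above consumes one translation entry without materializing it: plain value opcodes are skipped outright by discarding their operands, while object opcodes record a deferred object and translate its fields. A minimal standalone sketch of the skip half of that logic, using hypothetical opcode names and operand counts rather than V8's actual Translation format:

    // Sketch only: skipping variable-arity entries in a flat opcode stream,
    // as DoTranslateObjectAndSkip does for values it can ignore. The opcodes
    // and operand counts here are illustrative, not V8's.
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    enum Opcode { kRegister, kStackSlot, kLiteral, kDuplicatedObject };

    static int NumberOfOperandsFor(Opcode op) {
      switch (op) {
        case kRegister:
        case kStackSlot:
        case kLiteral:
        case kDuplicatedObject:
          return 1;  // hypothetical: each entry carries a single operand
      }
      return 0;
    }

    struct TranslationIterator {
      const std::vector<int>* stream;
      std::size_t pos = 0;
      int Next() { return (*stream)[pos++]; }
      void Skip(int n) { pos += static_cast<std::size_t>(n); }
    };

    // Consume exactly one entry without interpreting its payload.
    static void SkipEntry(TranslationIterator* it) {
      Opcode op = static_cast<Opcode>(it->Next());
      it->Skip(NumberOfOperandsFor(op));
    }

    int main() {
      std::vector<int> stream = {kRegister, 3, kLiteral, 7};
      TranslationIterator it{&stream};
      SkipEntry(&it);  // skips kRegister plus its operand
      std::printf("next entry starts at index %zu\n", it.pos);  // prints 2
      return 0;
    }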
- ASSERT(type == EAGER || type == SOFT || type == LAZY); + CHECK(type == EAGER || type == SOFT || type == LAZY); DeoptimizerData* data = isolate->deoptimizer_data(); int entry_count = data->deopt_entry_code_entries_[type]; if (max_entry_id < entry_count) return; entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries); while (max_entry_id >= entry_count) entry_count *= 2; - ASSERT(entry_count <= Deoptimizer::kMaxNumberOfEntries); + CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries); MacroAssembler masm(isolate, NULL, 16 * KB); masm.set_emit_debug_code(false); @@ -2736,8 +2773,8 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate, ASSERT(!RelocInfo::RequiresRelocation(desc)); MemoryChunk* chunk = data->deopt_entry_code_[type]; - ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >= - desc.instr_size); + CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >= + desc.instr_size); chunk->CommitArea(desc.instr_size); CopyBytes(chunk->area_start(), desc.buffer, static_cast<size_t>(desc.instr_size)); @@ -2804,15 +2841,15 @@ int FrameDescription::ComputeParametersCount() { case StackFrame::STUB: return -1; // Minus receiver. default: - UNREACHABLE(); + FATAL("Unexpected stack frame type"); return 0; } } Object* FrameDescription::GetParameter(int index) { - ASSERT(index >= 0); - ASSERT(index < ComputeParametersCount()); + CHECK_GE(index, 0); + CHECK_LT(index, ComputeParametersCount()); // The slot indexes for incoming arguments are negative. unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount()); return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset)); @@ -2820,7 +2857,7 @@ Object* FrameDescription::GetParameter(int index) { unsigned FrameDescription::GetExpressionCount() { - ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_); + CHECK_EQ(StackFrame::JAVA_SCRIPT, type_); unsigned size = GetFrameSize() - ComputeFixedSize(); return size / kPointerSize; } @@ -3022,7 +3059,7 @@ int Translation::NumberOfOperandsFor(Opcode opcode) { case JS_FRAME: return 3; } - UNREACHABLE(); + FATAL("Unexpected translation type"); return -1; } @@ -3132,14 +3169,13 @@ SlotRefValueBuilder::SlotRefValueBuilder(JavaScriptFrame* frame, TranslationIterator it(data->TranslationByteArray(), data->TranslationIndex(deopt_index)->value()); Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next()); - ASSERT(opcode == Translation::BEGIN); + CHECK_EQ(opcode, Translation::BEGIN); it.Next(); // Drop frame count. 
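Most of the hunks above replace ASSERT with CHECK, CHECK_EQ, CHECK_GE and friends, so the invariants remain enforced in release builds instead of being compiled out. A standalone sketch of that distinction under conventional macro semantics (V8's real macros print richer diagnostics and may differ in detail):

    // Sketch of debug-only vs. always-on assertion macros. These definitions
    // are illustrative, not V8's.
    #include <cstdio>
    #include <cstdlib>

    #define CHECK(cond)                                        \
      do {                                                     \
        if (!(cond)) {                                         \
          std::fprintf(stderr, "Check failed: %s\n", #cond);   \
          std::abort();                                        \
        }                                                      \
      } while (false)

    #define CHECK_EQ(a, b) CHECK((a) == (b))
    #define CHECK_GE(a, b) CHECK((a) >= (b))

    #ifdef DEBUG
    #define ASSERT(cond) CHECK(cond)   // enforced only in debug builds
    #else
    #define ASSERT(cond) ((void)0)     // compiled out in release builds
    #endif

    int main() {
      int register_param_count = 2;
      ASSERT(register_param_count >= 0);   // a no-op in release builds
      CHECK_GE(register_param_count, 0);   // aborts in any build if violated
      std::puts("invariants hold");
      return 0;
    }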
stack_frame_id_ = frame->fp(); int jsframe_count = it.Next(); - USE(jsframe_count); - ASSERT(jsframe_count > inlined_jsframe_index); + CHECK_GT(jsframe_count, inlined_jsframe_index); int jsframes_to_skip = inlined_jsframe_index; int number_of_slots = -1; // Number of slots inside our frame (yet unknown) bool should_deopt = false; @@ -3148,7 +3184,7 @@ SlotRefValueBuilder::SlotRefValueBuilder(JavaScriptFrame* frame, bool processed = false; if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) { if (jsframes_to_skip == 0) { - ASSERT(Translation::NumberOfOperandsFor(opcode) == 2); + CHECK_EQ(Translation::NumberOfOperandsFor(opcode), 2); it.Skip(1); // literal id int height = it.Next(); @@ -3195,7 +3231,7 @@ SlotRefValueBuilder::SlotRefValueBuilder(JavaScriptFrame* frame, // the nested slots of captured objects number_of_slots--; SlotRef& slot = slot_refs_.last(); - ASSERT(slot.Representation() != SlotRef::ARGUMENTS_OBJECT); + CHECK_NE(slot.Representation(), SlotRef::ARGUMENTS_OBJECT); number_of_slots += slot.GetChildrenCount(); if (slot.Representation() == SlotRef::DEFERRED_OBJECT || slot.Representation() == SlotRef::DUPLICATE_OBJECT) { @@ -3269,7 +3305,7 @@ void SlotRefValueBuilder::Prepare(Isolate* isolate) { while (current_slot_ < first_slot_index_) { GetNext(isolate, 0); } - ASSERT(current_slot_ == first_slot_index_); + CHECK_EQ(current_slot_, first_slot_index_); } @@ -3331,8 +3367,8 @@ Handle<Object> SlotRefValueBuilder::GetNext(Isolate* isolate, int lvl) { } case SlotRef::DEFERRED_OBJECT: { int length = slot.GetChildrenCount(); - ASSERT(slot_refs_[current_slot_].Representation() == SlotRef::LITERAL || - slot_refs_[current_slot_].Representation() == SlotRef::TAGGED); + CHECK(slot_refs_[current_slot_].Representation() == SlotRef::LITERAL || + slot_refs_[current_slot_].Representation() == SlotRef::TAGGED); int object_index = materialized_objects_.length(); if (object_index < prev_materialized_count_) { @@ -3341,7 +3377,7 @@ Handle<Object> SlotRefValueBuilder::GetNext(Isolate* isolate, int lvl) { Handle<Object> map_object = slot_refs_[current_slot_].GetValue(isolate); Handle<Map> map = Map::GeneralizeAllFieldRepresentations( - Handle<Map>::cast(map_object), Representation::Tagged()); + Handle<Map>::cast(map_object)); current_slot_++; // TODO(jarin) this should be unified with the code in // Deoptimizer::MaterializeNextHeapObject() @@ -3393,6 +3429,7 @@ Handle<Object> SlotRefValueBuilder::GetNext(Isolate* isolate, int lvl) { break; } UNREACHABLE(); + break; } case SlotRef::DUPLICATE_OBJECT: { @@ -3413,7 +3450,7 @@ Handle<Object> SlotRefValueBuilder::GetNext(Isolate* isolate, int lvl) { void SlotRefValueBuilder::Finish(Isolate* isolate) { // We should have processed all the slots - ASSERT(slot_refs_.length() == current_slot_); + CHECK_EQ(slot_refs_.length(), current_slot_); if (materialized_objects_.length() > prev_materialized_count_) { // We have materialized some new objects, so we have to store them @@ -3434,7 +3471,7 @@ Handle<FixedArray> MaterializedObjectStore::Get(Address fp) { return Handle<FixedArray>::null(); } Handle<FixedArray> array = GetStackEntries(); - ASSERT(array->length() > index); + CHECK_GT(array->length(), index); return Handle<FixedArray>::cast(Handle<Object>(array->get(index), isolate())); } @@ -3455,11 +3492,11 @@ void MaterializedObjectStore::Set(Address fp, void MaterializedObjectStore::Remove(Address fp) { int index = StackIdToIndex(fp); - ASSERT(index >= 0); + CHECK_GE(index, 0); frame_fps_.Remove(index); Handle<FixedArray> array = GetStackEntries(); - 
ASSERT(array->length() > index); + CHECK_LT(index, array->length()); for (int i = index; i < frame_fps_.length(); i++) { array->set(i, array->get(i + 1)); } @@ -3505,7 +3542,6 @@ Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) { return new_array; } -#ifdef ENABLE_DEBUGGER_SUPPORT DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer, int frame_index, @@ -3527,7 +3563,7 @@ DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer, if (has_arguments_adaptor) { output_frame = deoptimizer->output_[frame_index - 1]; - ASSERT(output_frame->GetFrameType() == StackFrame::ARGUMENTS_ADAPTOR); + CHECK_EQ(output_frame->GetFrameType(), StackFrame::ARGUMENTS_ADAPTOR); } parameters_count_ = output_frame->ComputeParametersCount(); @@ -3550,6 +3586,4 @@ void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); } -#endif // ENABLE_DEBUGGER_SUPPORT - } } // namespace v8::internal diff --git a/deps/v8/src/deoptimizer.h b/deps/v8/src/deoptimizer.h index a36362fc9..373f888ae 100644 --- a/deps/v8/src/deoptimizer.h +++ b/deps/v8/src/deoptimizer.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DEOPTIMIZER_H_ #define V8_DEOPTIMIZER_H_ @@ -169,7 +146,6 @@ class Deoptimizer : public Malloced { Isolate* isolate); static Deoptimizer* Grab(Isolate* isolate); -#ifdef ENABLE_DEBUGGER_SUPPORT // The returned object with information on the optimized frame needs to be // freed before another one can be generated. 
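In the MaterializedObjectStore::Remove hunk above, the entry for the removed frame pointer is compacted away by shifting every later slot one position to the left. A rough standalone analogue of that compaction over plain vectors, with the fp-to-index lookup replaced by a hypothetical linear search:

    // Sketch: remove one entry from a keyed parallel array by shifting the
    // tail left, mirroring the loop in MaterializedObjectStore::Remove.
    #include <cassert>
    #include <cstdint>
    #include <vector>

    using Address = std::uintptr_t;

    struct Store {
      std::vector<Address> frame_fps;  // keys
      std::vector<int> entries;        // values, parallel to frame_fps

      // Hypothetical stand-in for the StackIdToIndex lookup.
      int IndexOf(Address fp) const {
        for (std::size_t i = 0; i < frame_fps.size(); ++i) {
          if (frame_fps[i] == fp) return static_cast<int>(i);
        }
        return -1;
      }

      void Remove(Address fp) {
        int index = IndexOf(fp);
        assert(index >= 0);
        frame_fps.erase(frame_fps.begin() + index);
        // Shift the remaining values left over the removed slot.
        for (std::size_t i = index; i + 1 < entries.size(); ++i) {
          entries[i] = entries[i + 1];
        }
        entries.pop_back();
      }
    };

    int main() {
      Store s{{0x10, 0x20, 0x30}, {1, 2, 3}};
      s.Remove(0x20);
      assert(s.entries.size() == 2 && s.entries[1] == 3);
      return 0;
    }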
static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame, @@ -177,7 +153,6 @@ class Deoptimizer : public Malloced { Isolate* isolate); static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info, Isolate* isolate); -#endif // Makes sure that there is enough room in the relocation // information of a code object to perform lazy deoptimization @@ -212,14 +187,13 @@ class Deoptimizer : public Malloced { ~Deoptimizer(); void MaterializeHeapObjects(JavaScriptFrameIterator* it); -#ifdef ENABLE_DEBUGGER_SUPPORT + void MaterializeHeapNumbersForDebuggerInspectableFrame( Address parameters_top, uint32_t parameters_size, Address expressions_top, uint32_t expressions_size, DeoptimizedFrameInfo* info); -#endif static void ComputeOutputFrames(Deoptimizer* deoptimizer); @@ -329,14 +303,21 @@ class Deoptimizer : public Malloced { void DoComputeCompiledStubFrame(TranslationIterator* iterator, int frame_index); + // Translate object, store the result into an auxiliary array + // (deferred_objects_tagged_values_). void DoTranslateObject(TranslationIterator* iterator, int object_index, int field_index); + // Translate value, store the result into the given frame slot. void DoTranslateCommand(TranslationIterator* iterator, int frame_index, unsigned output_offset); + // Translate object, do not store the result anywhere (but do update + // the deferred materialization array). + void DoTranslateObjectAndSkip(TranslationIterator* iterator); + unsigned ComputeInputFrameSize() const; unsigned ComputeFixedSize(JSFunction* function) const; @@ -644,18 +625,14 @@ class DeoptimizerData { explicit DeoptimizerData(MemoryAllocator* allocator); ~DeoptimizerData(); -#ifdef ENABLE_DEBUGGER_SUPPORT void Iterate(ObjectVisitor* v); -#endif private: MemoryAllocator* allocator_; int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry]; MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry]; -#ifdef ENABLE_DEBUGGER_SUPPORT DeoptimizedFrameInfo* deoptimized_frame_info_; -#endif Deoptimizer* current_; @@ -919,7 +896,6 @@ class MaterializedObjectStore { }; -#ifdef ENABLE_DEBUGGER_SUPPORT // Class used to represent an unoptimized frame when the debugger // needs to inspect a frame that is part of an optimized frame. The // internally used FrameDescription objects are not GC safe so for use @@ -993,7 +969,6 @@ class DeoptimizedFrameInfo : public Malloced { friend class Deoptimizer; }; -#endif } } // namespace v8::internal diff --git a/deps/v8/src/disasm.h b/deps/v8/src/disasm.h index f7f2d4120..89b7fc261 100644 --- a/deps/v8/src/disasm.h +++ b/deps/v8/src/disasm.h @@ -1,29 +1,6 @@ // Copyright 2007-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DISASM_H_ #define V8_DISASM_H_ diff --git a/deps/v8/src/disassembler.cc b/deps/v8/src/disassembler.cc index 2af64228c..754d5a77a 100644 --- a/deps/v8/src/disassembler.cc +++ b/deps/v8/src/disassembler.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/disassembler.h b/deps/v8/src/disassembler.h index 878915003..f5f596efc 100644 --- a/deps/v8/src/disassembler.h +++ b/deps/v8/src/disassembler.h @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DISASSEMBLER_H_ #define V8_DISASSEMBLER_H_ diff --git a/deps/v8/src/diy-fp.cc b/deps/v8/src/diy-fp.cc index 491387770..51f75abb7 100644 --- a/deps/v8/src/diy-fp.cc +++ b/deps/v8/src/diy-fp.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "../include/v8stdint.h" #include "globals.h" diff --git a/deps/v8/src/diy-fp.h b/deps/v8/src/diy-fp.h index 26ff1a20b..f8f2673c4 100644 --- a/deps/v8/src/diy-fp.h +++ b/deps/v8/src/diy-fp.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DIY_FP_H_ #define V8_DIY_FP_H_ diff --git a/deps/v8/src/double.h b/deps/v8/src/double.h index fcf6906af..9fb7f843a 100644 --- a/deps/v8/src/double.h +++ b/deps/v8/src/double.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_DOUBLE_H_ #define V8_DOUBLE_H_ diff --git a/deps/v8/src/dtoa.cc b/deps/v8/src/dtoa.cc index bda67205c..42a95ed96 100644 --- a/deps/v8/src/dtoa.cc +++ b/deps/v8/src/dtoa.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <cmath> diff --git a/deps/v8/src/dtoa.h b/deps/v8/src/dtoa.h index 948a07919..dd88688d0 100644 --- a/deps/v8/src/dtoa.h +++ b/deps/v8/src/dtoa.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_DTOA_H_ #define V8_DTOA_H_ diff --git a/deps/v8/src/effects.h b/deps/v8/src/effects.h index 5d980f54e..9360fda04 100644 --- a/deps/v8/src/effects.h +++ b/deps/v8/src/effects.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_EFFECTS_H_ #define V8_EFFECTS_H_ diff --git a/deps/v8/src/elements-kind.cc b/deps/v8/src/elements-kind.cc index ff458e0ea..adab39679 100644 --- a/deps/v8/src/elements-kind.cc +++ b/deps/v8/src/elements-kind.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "elements-kind.h" @@ -172,28 +149,55 @@ ElementsKind GetNextMoreGeneralFastElementsKind(ElementsKind elements_kind, } +static bool IsTypedArrayElementsKind(ElementsKind elements_kind) { + return IsFixedTypedArrayElementsKind(elements_kind) || + IsExternalArrayElementsKind(elements_kind); +} + + +static inline bool IsFastTransitionTarget(ElementsKind elements_kind) { + return IsFastElementsKind(elements_kind) || + elements_kind == DICTIONARY_ELEMENTS; +} + bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind, ElementsKind to_kind) { - switch (from_kind) { - case FAST_SMI_ELEMENTS: - return to_kind != FAST_SMI_ELEMENTS; - case FAST_HOLEY_SMI_ELEMENTS: - return to_kind != FAST_SMI_ELEMENTS && - to_kind != FAST_HOLEY_SMI_ELEMENTS; - case FAST_DOUBLE_ELEMENTS: - return to_kind != FAST_SMI_ELEMENTS && - to_kind != FAST_HOLEY_SMI_ELEMENTS && - to_kind != FAST_DOUBLE_ELEMENTS; - case FAST_HOLEY_DOUBLE_ELEMENTS: - return to_kind == FAST_ELEMENTS || - to_kind == FAST_HOLEY_ELEMENTS; - case FAST_ELEMENTS: - return to_kind == FAST_HOLEY_ELEMENTS; - case FAST_HOLEY_ELEMENTS: - return false; - default: - return false; + if (IsTypedArrayElementsKind(from_kind) || + IsTypedArrayElementsKind(to_kind)) { + switch (from_kind) { +#define FIXED_TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \ + case TYPE##_ELEMENTS: \ + return to_kind == EXTERNAL_##TYPE##_ELEMENTS; + + TYPED_ARRAYS(FIXED_TYPED_ARRAY_CASE); +#undef FIXED_TYPED_ARRAY_CASE + default: + return false; + } + } + if (IsFastElementsKind(from_kind) && IsFastTransitionTarget(to_kind)) { + switch (from_kind) { + case FAST_SMI_ELEMENTS: + return to_kind != FAST_SMI_ELEMENTS; + case FAST_HOLEY_SMI_ELEMENTS: + return to_kind != FAST_SMI_ELEMENTS && + to_kind != FAST_HOLEY_SMI_ELEMENTS; + case FAST_DOUBLE_ELEMENTS: + return to_kind != FAST_SMI_ELEMENTS && + to_kind != FAST_HOLEY_SMI_ELEMENTS && + to_kind != FAST_DOUBLE_ELEMENTS; + case FAST_HOLEY_DOUBLE_ELEMENTS: + return to_kind == FAST_ELEMENTS || + to_kind == FAST_HOLEY_ELEMENTS; + case FAST_ELEMENTS: + return to_kind == FAST_HOLEY_ELEMENTS; + case FAST_HOLEY_ELEMENTS: + return false; + default: + return false; + } } + return false; } diff --git a/deps/v8/src/elements-kind.h b/deps/v8/src/elements-kind.h index d2605e8b0..1a550b0a3 100644 --- a/deps/v8/src/elements-kind.h +++ b/deps/v8/src/elements-kind.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ELEMENTS_KIND_H_ #define V8_ELEMENTS_KIND_H_ diff --git a/deps/v8/src/elements.cc b/deps/v8/src/elements.cc index 0624a0362..580a7186b 100644 --- a/deps/v8/src/elements.cc +++ b/deps/v8/src/elements.cc @@ -1,37 +1,14 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
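The rewritten IsMoreGeneralElementsKindTransition in the elements-kind.cc hunk above now also handles typed-array kinds (only the fixed-to-external transition is allowed for each typed-array type) while keeping the existing fast-kind lattice. A reduced standalone model of that fast-kind lattice, using a simplified enum rather than V8's ElementsKind:

    // Simplified model of the fast elements-kind transition lattice shown
    // above. The enum is a stand-in; V8's ElementsKind has many more members
    // (typed-array and dictionary kinds among them).
    #include <cassert>

    enum Kind {
      FAST_SMI, FAST_HOLEY_SMI, FAST_DOUBLE, FAST_HOLEY_DOUBLE,
      FAST, FAST_HOLEY
    };

    static bool IsMoreGeneralTransition(Kind from, Kind to) {
      switch (from) {
        case FAST_SMI:          return to != FAST_SMI;
        case FAST_HOLEY_SMI:    return to != FAST_SMI && to != FAST_HOLEY_SMI;
        case FAST_DOUBLE:       return to != FAST_SMI &&
                                       to != FAST_HOLEY_SMI &&
                                       to != FAST_DOUBLE;
        case FAST_HOLEY_DOUBLE: return to == FAST || to == FAST_HOLEY;
        case FAST:              return to == FAST_HOLEY;
        case FAST_HOLEY:        return false;
      }
      return false;
    }

    int main() {
      assert(IsMoreGeneralTransition(FAST_SMI, FAST_HOLEY));
      assert(!IsMoreGeneralTransition(FAST_HOLEY, FAST));
      return 0;
    }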
#include "v8.h" #include "arguments.h" -#include "objects.h" +#include "conversions.h" #include "elements.h" +#include "objects.h" #include "utils.h" -#include "v8conversions.h" // Each concrete ElementsAccessor can handle exactly one ElementsKind, // several abstract ElementsAccessor classes are used to allow sharing @@ -146,7 +123,9 @@ ELEMENTS_LIST(ELEMENTS_TRAITS) ElementsAccessor** ElementsAccessor::elements_accessors_; -static bool HasKey(FixedArray* array, Object* key) { +static bool HasKey(Handle<FixedArray> array, Handle<Object> key_handle) { + DisallowHeapAllocation no_gc; + Object* key = *key_handle; int len0 = array->length(); for (int i = 0; i < len0; i++) { Object* element = array->get(i); @@ -160,11 +139,11 @@ static bool HasKey(FixedArray* array, Object* key) { } -static Handle<Object> ThrowArrayLengthRangeError(Isolate* isolate) { - isolate->Throw( - *isolate->factory()->NewRangeError("invalid_array_length", - HandleVector<Object>(NULL, 0))); - return Handle<Object>(); +MUST_USE_RESULT +static MaybeHandle<Object> ThrowArrayLengthRangeError(Isolate* isolate) { + return isolate->Throw<Object>( + isolate->factory()->NewRangeError("invalid_array_length", + HandleVector<Object>(NULL, 0))); } @@ -302,15 +281,16 @@ static void CopyDoubleToObjectElements(Handle<FixedArrayBase> from_base, ASSERT((copy_size + static_cast<int>(to_start)) <= to_base->length() && (copy_size + static_cast<int>(from_start)) <= from_base->length()); if (copy_size == 0) return; + Isolate* isolate = from_base->GetIsolate(); Handle<FixedDoubleArray> from = Handle<FixedDoubleArray>::cast(from_base); Handle<FixedArray> to = Handle<FixedArray>::cast(to_base); for (int i = 0; i < copy_size; ++i) { - HandleScope scope(from_base->GetIsolate()); + HandleScope scope(isolate); if (IsFastSmiElementsKind(to_kind)) { UNIMPLEMENTED(); } else { ASSERT(IsFastObjectElementsKind(to_kind)); - Handle<Object> value = from->get_as_handle(i + from_start); + Handle<Object> value = FixedDoubleArray::get(from, i + from_start); to->set(i + to_start, *value, UPDATE_WRITE_BARRIER); } } @@ -517,12 +497,13 @@ static void TraceTopFrame(Isolate* isolate) { } -void CheckArrayAbuse(JSObject* obj, const char* op, uint32_t key, +void CheckArrayAbuse(Handle<JSObject> obj, const char* op, uint32_t key, bool allow_appending) { + DisallowHeapAllocation no_allocation; Object* raw_length = NULL; const char* elements_type = "array"; if (obj->IsJSArray()) { - JSArray* array = JSArray::cast(obj); + JSArray* array = JSArray::cast(*obj); raw_length = array->length(); } else { raw_length = Smi::FromInt(obj->elements()->length()); @@ -587,19 +568,17 @@ class ElementsAccessorBase : public ElementsAccessor { return ElementsTraits::Kind; } - static void ValidateContents(JSObject* holder, int length) { + static void ValidateContents(Handle<JSObject> holder, int length) { } - static void ValidateImpl(JSObject* holder) { - FixedArrayBase* fixed_array_base = holder->elements(); - // When objects are first allocated, its elements are Failures. - if (fixed_array_base->IsFailure()) return; + static void ValidateImpl(Handle<JSObject> holder) { + Handle<FixedArrayBase> fixed_array_base(holder->elements()); if (!fixed_array_base->IsHeapObject()) return; // Arrays that have been shifted in place can't be verified. 
if (fixed_array_base->IsFiller()) return; int length = 0; if (holder->IsJSArray()) { - Object* length_obj = JSArray::cast(holder)->length(); + Object* length_obj = Handle<JSArray>::cast(holder)->length(); if (length_obj->IsSmi()) { length = Smi::cast(length_obj)->value(); } @@ -609,51 +588,33 @@ class ElementsAccessorBase : public ElementsAccessor { ElementsAccessorSubclass::ValidateContents(holder, length); } - virtual void Validate(JSObject* holder) V8_FINAL V8_OVERRIDE { + virtual void Validate(Handle<JSObject> holder) V8_FINAL V8_OVERRIDE { + DisallowHeapAllocation no_gc; ElementsAccessorSubclass::ValidateImpl(holder); } - static bool HasElementImpl(Object* receiver, - JSObject* holder, + static bool HasElementImpl(Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* backing_store) { + Handle<FixedArrayBase> backing_store) { return ElementsAccessorSubclass::GetAttributesImpl( receiver, holder, key, backing_store) != ABSENT; } - virtual bool HasElement(Object* receiver, - JSObject* holder, - uint32_t key, - FixedArrayBase* backing_store) V8_FINAL V8_OVERRIDE { - if (backing_store == NULL) { - backing_store = holder->elements(); - } + virtual bool HasElement( + Handle<Object> receiver, + Handle<JSObject> holder, + uint32_t key, + Handle<FixedArrayBase> backing_store) V8_FINAL V8_OVERRIDE { return ElementsAccessorSubclass::HasElementImpl( receiver, holder, key, backing_store); } - // TODO(ishell): Temporary wrapper until handlified. - MUST_USE_RESULT virtual Handle<Object> Get( + MUST_USE_RESULT virtual MaybeHandle<Object> Get( Handle<Object> receiver, Handle<JSObject> holder, uint32_t key, Handle<FixedArrayBase> backing_store) V8_FINAL V8_OVERRIDE { - CALL_HEAP_FUNCTION(holder->GetIsolate(), - Get(*receiver, *holder, key, - backing_store.is_null() - ? NULL : *backing_store), - Object); - } - - MUST_USE_RESULT virtual MaybeObject* Get( - Object* receiver, - JSObject* holder, - uint32_t key, - FixedArrayBase* backing_store) V8_FINAL V8_OVERRIDE { - if (backing_store == NULL) { - backing_store = holder->elements(); - } - if (!IsExternalArrayElementsKind(ElementsTraits::Kind) && FLAG_trace_js_array_abuse) { CheckArrayAbuse(holder, "elements read", key); @@ -668,90 +629,87 @@ class ElementsAccessorBase : public ElementsAccessor { receiver, holder, key, backing_store); } - MUST_USE_RESULT static MaybeObject* GetImpl(Object* receiver, - JSObject* obj, - uint32_t key, - FixedArrayBase* backing_store) { - return (key < ElementsAccessorSubclass::GetCapacityImpl(backing_store)) - ? 
BackingStore::cast(backing_store)->get(key) - : backing_store->GetHeap()->the_hole_value(); + MUST_USE_RESULT static MaybeHandle<Object> GetImpl( + Handle<Object> receiver, + Handle<JSObject> obj, + uint32_t key, + Handle<FixedArrayBase> backing_store) { + if (key < ElementsAccessorSubclass::GetCapacityImpl(backing_store)) { + return BackingStore::get(Handle<BackingStore>::cast(backing_store), key); + } else { + return backing_store->GetIsolate()->factory()->the_hole_value(); + } } MUST_USE_RESULT virtual PropertyAttributes GetAttributes( - Object* receiver, - JSObject* holder, + Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* backing_store) V8_FINAL V8_OVERRIDE { - if (backing_store == NULL) { - backing_store = holder->elements(); - } + Handle<FixedArrayBase> backing_store) V8_FINAL V8_OVERRIDE { return ElementsAccessorSubclass::GetAttributesImpl( receiver, holder, key, backing_store); } MUST_USE_RESULT static PropertyAttributes GetAttributesImpl( - Object* receiver, - JSObject* obj, + Handle<Object> receiver, + Handle<JSObject> obj, uint32_t key, - FixedArrayBase* backing_store) { + Handle<FixedArrayBase> backing_store) { if (key >= ElementsAccessorSubclass::GetCapacityImpl(backing_store)) { return ABSENT; } - return BackingStore::cast(backing_store)->is_the_hole(key) ? ABSENT : NONE; + return + Handle<BackingStore>::cast(backing_store)->is_the_hole(key) + ? ABSENT : NONE; } MUST_USE_RESULT virtual PropertyType GetType( - Object* receiver, - JSObject* holder, + Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* backing_store) V8_FINAL V8_OVERRIDE { - if (backing_store == NULL) { - backing_store = holder->elements(); - } + Handle<FixedArrayBase> backing_store) V8_FINAL V8_OVERRIDE { return ElementsAccessorSubclass::GetTypeImpl( receiver, holder, key, backing_store); } MUST_USE_RESULT static PropertyType GetTypeImpl( - Object* receiver, - JSObject* obj, - uint32_t key, - FixedArrayBase* backing_store) { + Handle<Object> receiver, + Handle<JSObject> obj, + uint32_t key, + Handle<FixedArrayBase> backing_store) { if (key >= ElementsAccessorSubclass::GetCapacityImpl(backing_store)) { return NONEXISTENT; } - return BackingStore::cast(backing_store)->is_the_hole(key) - ? NONEXISTENT : FIELD; + return + Handle<BackingStore>::cast(backing_store)->is_the_hole(key) + ? 
NONEXISTENT : FIELD; } - MUST_USE_RESULT virtual AccessorPair* GetAccessorPair( - Object* receiver, - JSObject* holder, + MUST_USE_RESULT virtual MaybeHandle<AccessorPair> GetAccessorPair( + Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* backing_store) V8_FINAL V8_OVERRIDE { - if (backing_store == NULL) { - backing_store = holder->elements(); - } + Handle<FixedArrayBase> backing_store) V8_FINAL V8_OVERRIDE { return ElementsAccessorSubclass::GetAccessorPairImpl( receiver, holder, key, backing_store); } - MUST_USE_RESULT static AccessorPair* GetAccessorPairImpl( - Object* receiver, - JSObject* obj, - uint32_t key, - FixedArrayBase* backing_store) { - return NULL; + MUST_USE_RESULT static MaybeHandle<AccessorPair> GetAccessorPairImpl( + Handle<Object> receiver, + Handle<JSObject> obj, + uint32_t key, + Handle<FixedArrayBase> backing_store) { + return MaybeHandle<AccessorPair>(); } - MUST_USE_RESULT virtual Handle<Object> SetLength( + MUST_USE_RESULT virtual MaybeHandle<Object> SetLength( Handle<JSArray> array, Handle<Object> length) V8_FINAL V8_OVERRIDE { return ElementsAccessorSubclass::SetLengthImpl( array, length, handle(array->elements())); } - MUST_USE_RESULT static Handle<Object> SetLengthImpl( + MUST_USE_RESULT static MaybeHandle<Object> SetLengthImpl( Handle<JSObject> obj, Handle<Object> length, Handle<FixedArrayBase> backing_store); @@ -771,7 +729,7 @@ class ElementsAccessorBase : public ElementsAccessor { UNIMPLEMENTED(); } - MUST_USE_RESULT virtual Handle<Object> Delete( + MUST_USE_RESULT virtual MaybeHandle<Object> Delete( Handle<JSObject> obj, uint32_t key, JSReceiver::DeleteMode mode) V8_OVERRIDE = 0; @@ -787,38 +745,45 @@ class ElementsAccessorBase : public ElementsAccessor { } virtual void CopyElements( - Handle<JSObject> from_holder, + Handle<FixedArrayBase> from, uint32_t from_start, ElementsKind from_kind, Handle<FixedArrayBase> to, uint32_t to_start, - int copy_size, - Handle<FixedArrayBase> from) V8_FINAL V8_OVERRIDE { - int packed_size = kPackedSizeNotKnown; - if (from.is_null()) { - from = handle(from_holder->elements()); - } + int copy_size) V8_FINAL V8_OVERRIDE { + ASSERT(!from.is_null()); + ElementsAccessorSubclass::CopyElementsImpl( + from, from_start, to, from_kind, to_start, kPackedSizeNotKnown, + copy_size); + } - if (!from_holder.is_null()) { - bool is_packed = IsFastPackedElementsKind(from_kind) && - from_holder->IsJSArray(); - if (is_packed) { - packed_size = - Smi::cast(Handle<JSArray>::cast(from_holder)->length())->value(); - if (copy_size >= 0 && packed_size > copy_size) { - packed_size = copy_size; - } + virtual void CopyElements( + JSObject* from_holder, + uint32_t from_start, + ElementsKind from_kind, + Handle<FixedArrayBase> to, + uint32_t to_start, + int copy_size) V8_FINAL V8_OVERRIDE { + int packed_size = kPackedSizeNotKnown; + bool is_packed = IsFastPackedElementsKind(from_kind) && + from_holder->IsJSArray(); + if (is_packed) { + packed_size = + Smi::cast(JSArray::cast(from_holder)->length())->value(); + if (copy_size >= 0 && packed_size > copy_size) { + packed_size = copy_size; } } + Handle<FixedArrayBase> from(from_holder->elements()); ElementsAccessorSubclass::CopyElementsImpl( from, from_start, to, from_kind, to_start, packed_size, copy_size); } - MUST_USE_RESULT virtual MaybeObject* AddElementsToFixedArray( - Object* receiver, - JSObject* holder, - FixedArray* to, - FixedArrayBase* from) V8_FINAL V8_OVERRIDE { + virtual MaybeHandle<FixedArray> AddElementsToFixedArray( + Handle<Object> receiver, + 
Handle<JSObject> holder, + Handle<FixedArray> to, + Handle<FixedArrayBase> from) V8_FINAL V8_OVERRIDE { int len0 = to->length(); #ifdef ENABLE_SLOW_ASSERTS if (FLAG_enable_slow_asserts) { @@ -827,25 +792,26 @@ class ElementsAccessorBase : public ElementsAccessor { } } #endif - if (from == NULL) { - from = holder->elements(); - } // Optimize if 'other' is empty. // We cannot optimize if 'this' is empty, as other may have holes. uint32_t len1 = ElementsAccessorSubclass::GetCapacityImpl(from); if (len1 == 0) return to; + Isolate* isolate = from->GetIsolate(); + // Compute how many elements are not in other. uint32_t extra = 0; for (uint32_t y = 0; y < len1; y++) { uint32_t key = ElementsAccessorSubclass::GetKeyForIndexImpl(from, y); if (ElementsAccessorSubclass::HasElementImpl( receiver, holder, key, from)) { - MaybeObject* maybe_value = - ElementsAccessorSubclass::GetImpl(receiver, holder, key, from); - Object* value; - if (!maybe_value->To(&value)) return maybe_value; + Handle<Object> value; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, value, + ElementsAccessorSubclass::GetImpl(receiver, holder, key, from), + FixedArray); + ASSERT(!value->IsTheHole()); if (!HasKey(to, value)) { extra++; @@ -856,9 +822,7 @@ class ElementsAccessorBase : public ElementsAccessor { if (extra == 0) return to; // Allocate the result - FixedArray* result; - MaybeObject* maybe_obj = from->GetHeap()->AllocateFixedArray(len0 + extra); - if (!maybe_obj->To(&result)) return maybe_obj; + Handle<FixedArray> result = isolate->factory()->NewFixedArray(len0 + extra); // Fill in the content { @@ -877,12 +841,13 @@ class ElementsAccessorBase : public ElementsAccessor { ElementsAccessorSubclass::GetKeyForIndexImpl(from, y); if (ElementsAccessorSubclass::HasElementImpl( receiver, holder, key, from)) { - MaybeObject* maybe_value = - ElementsAccessorSubclass::GetImpl(receiver, holder, key, from); - Object* value; - if (!maybe_value->To(&value)) return maybe_value; + Handle<Object> value; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, value, + ElementsAccessorSubclass::GetImpl(receiver, holder, key, from), + FixedArray); if (!value->IsTheHole() && !HasKey(to, value)) { - result->set(len0 + index, value); + result->set(len0 + index, *value); index++; } } @@ -892,21 +857,21 @@ class ElementsAccessorBase : public ElementsAccessor { } protected: - static uint32_t GetCapacityImpl(FixedArrayBase* backing_store) { + static uint32_t GetCapacityImpl(Handle<FixedArrayBase> backing_store) { return backing_store->length(); } - virtual uint32_t GetCapacity(FixedArrayBase* backing_store) + virtual uint32_t GetCapacity(Handle<FixedArrayBase> backing_store) V8_FINAL V8_OVERRIDE { return ElementsAccessorSubclass::GetCapacityImpl(backing_store); } - static uint32_t GetKeyForIndexImpl(FixedArrayBase* backing_store, + static uint32_t GetKeyForIndexImpl(Handle<FixedArrayBase> backing_store, uint32_t index) { return index; } - virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store, + virtual uint32_t GetKeyForIndex(Handle<FixedArrayBase> backing_store, uint32_t index) V8_FINAL V8_OVERRIDE { return ElementsAccessorSubclass::GetKeyForIndexImpl(backing_store, index); } @@ -962,11 +927,15 @@ class FastElementsAccessor if (length == 0) { array->initialize_elements(); } else { - backing_store->set_length(length); + int filler_size = (old_capacity - length) * ElementSize; Address filler_start = backing_store->address() + BackingStore::OffsetOfElementAt(length); - int filler_size = (old_capacity - length) * ElementSize; 
array->GetHeap()->CreateFillerObjectAt(filler_start, filler_size); + + // We are storing the new length using release store after creating a + // filler for the left-over space to avoid races with the sweeper + // thread. + backing_store->synchronized_set_length(length); } } else { // Otherwise, fill the unused tail with holes. @@ -984,7 +953,7 @@ class FastElementsAccessor if (!array->ShouldConvertToSlowElements(new_capacity)) { FastElementsAccessorSubclass:: SetFastElementsCapacityAndLength(array, new_capacity, length); - array->ValidateElements(); + JSObject::ValidateElements(array); return length_object; } @@ -1009,7 +978,8 @@ class FastElementsAccessor backing_store->map() == heap->sloppy_arguments_elements_map(); if (is_sloppy_arguments_elements_map) { backing_store = handle( - BackingStore::cast(Handle<FixedArray>::cast(backing_store)->get(1))); + BackingStore::cast(Handle<FixedArray>::cast(backing_store)->get(1)), + isolate); } uint32_t length = static_cast<uint32_t>( obj->IsJSArray() @@ -1050,7 +1020,7 @@ class FastElementsAccessor return isolate->factory()->true_value(); } - virtual Handle<Object> Delete( + virtual MaybeHandle<Object> Delete( Handle<JSObject> obj, uint32_t key, JSReceiver::DeleteMode mode) V8_FINAL V8_OVERRIDE { @@ -1058,31 +1028,34 @@ class FastElementsAccessor } static bool HasElementImpl( - Object* receiver, - JSObject* holder, + Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* backing_store) { + Handle<FixedArrayBase> backing_store) { if (key >= static_cast<uint32_t>(backing_store->length())) { return false; } - return !BackingStore::cast(backing_store)->is_the_hole(key); + return !Handle<BackingStore>::cast(backing_store)->is_the_hole(key); } - static void ValidateContents(JSObject* holder, int length) { + static void ValidateContents(Handle<JSObject> holder, int length) { #if DEBUG - FixedArrayBase* elements = holder->elements(); - Heap* heap = elements->GetHeap(); + Isolate* isolate = holder->GetIsolate(); + HandleScope scope(isolate); + Handle<FixedArrayBase> elements(holder->elements(), isolate); Map* map = elements->map(); ASSERT((IsFastSmiOrObjectElementsKind(KindTraits::Kind) && - (map == heap->fixed_array_map() || - map == heap->fixed_cow_array_map())) || + (map == isolate->heap()->fixed_array_map() || + map == isolate->heap()->fixed_cow_array_map())) || (IsFastDoubleElementsKind(KindTraits::Kind) == - ((map == heap->fixed_array_map() && length == 0) || - map == heap->fixed_double_array_map()))); + ((map == isolate->heap()->fixed_array_map() && length == 0) || + map == isolate->heap()->fixed_double_array_map()))); + DisallowHeapAllocation no_gc; for (int i = 0; i < length; i++) { - BackingStore* backing_store = BackingStore::cast(elements); + HandleScope scope(isolate); + Handle<BackingStore> backing_store = Handle<BackingStore>::cast(elements); ASSERT((!IsFastSmiElementsKind(KindTraits::Kind) || - static_cast<Object*>(backing_store->get(i))->IsSmi()) || + BackingStore::get(backing_store, i)->IsSmi()) || (IsFastHoleyElementsKind(KindTraits::Kind) == backing_store->is_the_hole(i))); } @@ -1091,7 +1064,7 @@ class FastElementsAccessor }; -static inline ElementsKind ElementsKindForArray(FixedArrayBase* array) { +static inline ElementsKind ElementsKindForArray(Handle<FixedArrayBase> array) { switch (array->map()->instance_type()) { case FIXED_ARRAY_TYPE: if (array->IsDictionary()) { @@ -1162,7 +1135,7 @@ class FastSmiOrObjectElementsAccessor Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(from); 
Handle<FixedArrayBase> arguments( FixedArrayBase::cast(parameter_map->get(1))); - ElementsKind from_kind = ElementsKindForArray(*arguments); + ElementsKind from_kind = ElementsKindForArray(arguments); CopyElementsImpl(arguments, from_start, to, from_kind, to_start, packed_size, copy_size); break; @@ -1345,37 +1318,39 @@ class TypedElementsAccessor friend class ElementsAccessorBase<AccessorClass, ElementsKindTraits<Kind> >; - MUST_USE_RESULT static MaybeObject* GetImpl(Object* receiver, - JSObject* obj, - uint32_t key, - FixedArrayBase* backing_store) { - return - key < AccessorClass::GetCapacityImpl(backing_store) - ? BackingStore::cast(backing_store)->get(key) - : backing_store->GetHeap()->undefined_value(); + MUST_USE_RESULT static MaybeHandle<Object> GetImpl( + Handle<Object> receiver, + Handle<JSObject> obj, + uint32_t key, + Handle<FixedArrayBase> backing_store) { + if (key < AccessorClass::GetCapacityImpl(backing_store)) { + return BackingStore::get(Handle<BackingStore>::cast(backing_store), key); + } else { + return backing_store->GetIsolate()->factory()->undefined_value(); + } } MUST_USE_RESULT static PropertyAttributes GetAttributesImpl( - Object* receiver, - JSObject* obj, + Handle<Object> receiver, + Handle<JSObject> obj, uint32_t key, - FixedArrayBase* backing_store) { + Handle<FixedArrayBase> backing_store) { return key < AccessorClass::GetCapacityImpl(backing_store) ? NONE : ABSENT; } MUST_USE_RESULT static PropertyType GetTypeImpl( - Object* receiver, - JSObject* obj, + Handle<Object> receiver, + Handle<JSObject> obj, uint32_t key, - FixedArrayBase* backing_store) { + Handle<FixedArrayBase> backing_store) { return key < AccessorClass::GetCapacityImpl(backing_store) ? FIELD : NONEXISTENT; } - MUST_USE_RESULT static Handle<Object> SetLengthImpl( + MUST_USE_RESULT static MaybeHandle<Object> SetLengthImpl( Handle<JSObject> obj, Handle<Object> length, Handle<FixedArrayBase> backing_store) { @@ -1384,7 +1359,7 @@ class TypedElementsAccessor return obj; } - MUST_USE_RESULT virtual Handle<Object> Delete( + MUST_USE_RESULT virtual MaybeHandle<Object> Delete( Handle<JSObject> obj, uint32_t key, JSReceiver::DeleteMode mode) V8_FINAL V8_OVERRIDE { @@ -1392,10 +1367,10 @@ class TypedElementsAccessor return obj->GetIsolate()->factory()->true_value(); } - static bool HasElementImpl(Object* receiver, - JSObject* holder, + static bool HasElementImpl(Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* backing_store) { + Handle<FixedArrayBase> backing_store) { uint32_t capacity = AccessorClass::GetCapacityImpl(backing_store); return key < capacity; @@ -1430,13 +1405,14 @@ class DictionaryElementsAccessor // Adjusts the length of the dictionary backing store and returns the new // length according to ES5 section 15.4.5.2 behavior. 
- MUST_USE_RESULT static MaybeObject* SetLengthWithoutNormalize( - FixedArrayBase* store, - JSArray* array, - Object* length_object, + static Handle<Object> SetLengthWithoutNormalize( + Handle<FixedArrayBase> store, + Handle<JSArray> array, + Handle<Object> length_object, uint32_t length) { - SeededNumberDictionary* dict = SeededNumberDictionary::cast(store); - Heap* heap = array->GetHeap(); + Handle<SeededNumberDictionary> dict = + Handle<SeededNumberDictionary>::cast(store); + Isolate* isolate = array->GetIsolate(); int capacity = dict->Capacity(); uint32_t new_length = length; uint32_t old_length = static_cast<uint32_t>(array->length()->Number()); @@ -1444,6 +1420,7 @@ class DictionaryElementsAccessor // Find last non-deletable element in range of elements to be // deleted and adjust range accordingly. for (int i = 0; i < capacity; i++) { + DisallowHeapAllocation no_gc; Object* key = dict->KeyAt(i); if (key->IsNumber()) { uint32_t number = static_cast<uint32_t>(key->Number()); @@ -1454,8 +1431,7 @@ class DictionaryElementsAccessor } } if (new_length != length) { - MaybeObject* maybe_object = heap->NumberFromUint32(new_length); - if (!maybe_object->To(&length_object)) return maybe_object; + length_object = isolate->factory()->NewNumberFromUint(new_length); } } @@ -1463,13 +1439,12 @@ class DictionaryElementsAccessor // If the length of a slow array is reset to zero, we clear // the array and flush backing storage. This has the added // benefit that the array returns to fast mode. - Object* obj; - MaybeObject* maybe_obj = array->ResetElements(); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + JSObject::ResetElements(array); } else { + DisallowHeapAllocation no_gc; // Remove elements that should be deleted. int removed_entries = 0; - Object* the_hole_value = heap->the_hole_value(); + Handle<Object> the_hole_value = isolate->factory()->the_hole_value(); for (int i = 0; i < capacity; i++) { Object* key = dict->KeyAt(i); if (key->IsNumber()) { @@ -1487,71 +1462,46 @@ class DictionaryElementsAccessor return length_object; } - // TODO(ishell): Temporary wrapper until handlified. 
- MUST_USE_RESULT static Handle<Object> SetLengthWithoutNormalize( - Handle<FixedArrayBase> store, - Handle<JSArray> array, - Handle<Object> length_object, - uint32_t length) { - CALL_HEAP_FUNCTION(array->GetIsolate(), - SetLengthWithoutNormalize( - *store, *array, *length_object, length), - Object); - } - - MUST_USE_RESULT static MaybeObject* DeleteCommon( - JSObject* obj, + MUST_USE_RESULT static MaybeHandle<Object> DeleteCommon( + Handle<JSObject> obj, uint32_t key, JSReceiver::DeleteMode mode) { Isolate* isolate = obj->GetIsolate(); - Heap* heap = isolate->heap(); - FixedArray* backing_store = FixedArray::cast(obj->elements()); + Handle<FixedArray> backing_store(FixedArray::cast(obj->elements()), + isolate); bool is_arguments = (obj->GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS); if (is_arguments) { - backing_store = FixedArray::cast(backing_store->get(1)); + backing_store = handle(FixedArray::cast(backing_store->get(1)), isolate); } - SeededNumberDictionary* dictionary = - SeededNumberDictionary::cast(backing_store); + Handle<SeededNumberDictionary> dictionary = + Handle<SeededNumberDictionary>::cast(backing_store); int entry = dictionary->FindEntry(key); if (entry != SeededNumberDictionary::kNotFound) { - Object* result = dictionary->DeleteProperty(entry, mode); - if (result == heap->false_value()) { + Handle<Object> result = + SeededNumberDictionary::DeleteProperty(dictionary, entry, mode); + if (*result == *isolate->factory()->false_value()) { if (mode == JSObject::STRICT_DELETION) { // Deleting a non-configurable property in strict mode. - HandleScope scope(isolate); - Handle<Object> holder(obj, isolate); Handle<Object> name = isolate->factory()->NewNumberFromUint(key); - Handle<Object> args[2] = { name, holder }; + Handle<Object> args[2] = { name, obj }; Handle<Object> error = isolate->factory()->NewTypeError("strict_delete_property", HandleVector(args, 2)); - return isolate->Throw(*error); + return isolate->Throw<Object>(error); } - return heap->false_value(); - } - MaybeObject* maybe_elements = dictionary->Shrink(key); - FixedArray* new_elements = NULL; - if (!maybe_elements->To(&new_elements)) { - return maybe_elements; + return isolate->factory()->false_value(); } + Handle<FixedArray> new_elements = + SeededNumberDictionary::Shrink(dictionary, key); + if (is_arguments) { - FixedArray::cast(obj->elements())->set(1, new_elements); + FixedArray::cast(obj->elements())->set(1, *new_elements); } else { - obj->set_elements(new_elements); + obj->set_elements(*new_elements); } } - return heap->true_value(); - } - - // TODO(ishell): Temporary wrapper until handlified. 
- MUST_USE_RESULT static Handle<Object> DeleteCommon( - Handle<JSObject> obj, - uint32_t key, - JSReceiver::DeleteMode mode) { - CALL_HEAP_FUNCTION(obj->GetIsolate(), - DeleteCommon(*obj, key, mode), - Object); + return isolate->factory()->true_value(); } static void CopyElementsImpl(Handle<FixedArrayBase> from, @@ -1569,42 +1519,42 @@ class DictionaryElementsAccessor friend class ElementsAccessorBase<DictionaryElementsAccessor, ElementsKindTraits<DICTIONARY_ELEMENTS> >; - MUST_USE_RESULT virtual Handle<Object> Delete( + MUST_USE_RESULT virtual MaybeHandle<Object> Delete( Handle<JSObject> obj, uint32_t key, JSReceiver::DeleteMode mode) V8_FINAL V8_OVERRIDE { return DeleteCommon(obj, key, mode); } - MUST_USE_RESULT static MaybeObject* GetImpl( - Object* receiver, - JSObject* obj, + MUST_USE_RESULT static MaybeHandle<Object> GetImpl( + Handle<Object> receiver, + Handle<JSObject> obj, uint32_t key, - FixedArrayBase* store) { - SeededNumberDictionary* backing_store = SeededNumberDictionary::cast(store); + Handle<FixedArrayBase> store) { + Handle<SeededNumberDictionary> backing_store = + Handle<SeededNumberDictionary>::cast(store); + Isolate* isolate = backing_store->GetIsolate(); int entry = backing_store->FindEntry(key); if (entry != SeededNumberDictionary::kNotFound) { - Object* element = backing_store->ValueAt(entry); + Handle<Object> element(backing_store->ValueAt(entry), isolate); PropertyDetails details = backing_store->DetailsAt(entry); if (details.type() == CALLBACKS) { - return obj->GetElementWithCallback(receiver, - element, - key, - obj); + return JSObject::GetElementWithCallback( + obj, receiver, element, key, obj); } else { return element; } } - return obj->GetHeap()->the_hole_value(); + return isolate->factory()->the_hole_value(); } MUST_USE_RESULT static PropertyAttributes GetAttributesImpl( - Object* receiver, - JSObject* obj, + Handle<Object> receiver, + Handle<JSObject> obj, uint32_t key, - FixedArrayBase* backing_store) { - SeededNumberDictionary* dictionary = - SeededNumberDictionary::cast(backing_store); + Handle<FixedArrayBase> backing_store) { + Handle<SeededNumberDictionary> dictionary = + Handle<SeededNumberDictionary>::cast(backing_store); int entry = dictionary->FindEntry(key); if (entry != SeededNumberDictionary::kNotFound) { return dictionary->DetailsAt(entry).attributes(); @@ -1613,11 +1563,12 @@ class DictionaryElementsAccessor } MUST_USE_RESULT static PropertyType GetTypeImpl( - Object* receiver, - JSObject* obj, + Handle<Object> receiver, + Handle<JSObject> obj, uint32_t key, - FixedArrayBase* store) { - SeededNumberDictionary* backing_store = SeededNumberDictionary::cast(store); + Handle<FixedArrayBase> store) { + Handle<SeededNumberDictionary> backing_store = + Handle<SeededNumberDictionary>::cast(store); int entry = backing_store->FindEntry(key); if (entry != SeededNumberDictionary::kNotFound) { return backing_store->DetailsAt(entry).type(); @@ -1625,32 +1576,36 @@ class DictionaryElementsAccessor return NONEXISTENT; } - MUST_USE_RESULT static AccessorPair* GetAccessorPairImpl( - Object* receiver, - JSObject* obj, + MUST_USE_RESULT static MaybeHandle<AccessorPair> GetAccessorPairImpl( + Handle<Object> receiver, + Handle<JSObject> obj, uint32_t key, - FixedArrayBase* store) { - SeededNumberDictionary* backing_store = SeededNumberDictionary::cast(store); + Handle<FixedArrayBase> store) { + Handle<SeededNumberDictionary> backing_store = + Handle<SeededNumberDictionary>::cast(store); int entry = backing_store->FindEntry(key); if (entry != 
SeededNumberDictionary::kNotFound && backing_store->DetailsAt(entry).type() == CALLBACKS && backing_store->ValueAt(entry)->IsAccessorPair()) { - return AccessorPair::cast(backing_store->ValueAt(entry)); + return handle(AccessorPair::cast(backing_store->ValueAt(entry))); } - return NULL; + return MaybeHandle<AccessorPair>(); } - static bool HasElementImpl(Object* receiver, - JSObject* holder, + static bool HasElementImpl(Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* backing_store) { - return SeededNumberDictionary::cast(backing_store)->FindEntry(key) != - SeededNumberDictionary::kNotFound; + Handle<FixedArrayBase> store) { + Handle<SeededNumberDictionary> backing_store = + Handle<SeededNumberDictionary>::cast(store); + return backing_store->FindEntry(key) != SeededNumberDictionary::kNotFound; } - static uint32_t GetKeyForIndexImpl(FixedArrayBase* store, + static uint32_t GetKeyForIndexImpl(Handle<FixedArrayBase> store, uint32_t index) { - SeededNumberDictionary* dict = SeededNumberDictionary::cast(store); + DisallowHeapAllocation no_gc; + Handle<SeededNumberDictionary> dict = + Handle<SeededNumberDictionary>::cast(store); Object* key = dict->KeyAt(index); return Smi::cast(key)->value(); } @@ -1670,31 +1625,38 @@ class SloppyArgumentsElementsAccessor : public ElementsAccessorBase< SloppyArgumentsElementsAccessor, ElementsKindTraits<SLOPPY_ARGUMENTS_ELEMENTS> >; - MUST_USE_RESULT static MaybeObject* GetImpl(Object* receiver, - JSObject* obj, - uint32_t key, - FixedArrayBase* parameters) { - FixedArray* parameter_map = FixedArray::cast(parameters); - Object* probe = GetParameterMapArg(obj, parameter_map, key); + MUST_USE_RESULT static MaybeHandle<Object> GetImpl( + Handle<Object> receiver, + Handle<JSObject> obj, + uint32_t key, + Handle<FixedArrayBase> parameters) { + Isolate* isolate = obj->GetIsolate(); + Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(parameters); + Handle<Object> probe = GetParameterMapArg(obj, parameter_map, key); if (!probe->IsTheHole()) { + DisallowHeapAllocation no_gc; Context* context = Context::cast(parameter_map->get(0)); - int context_index = Smi::cast(probe)->value(); + int context_index = Handle<Smi>::cast(probe)->value(); ASSERT(!context->get(context_index)->IsTheHole()); - return context->get(context_index); + return handle(context->get(context_index), isolate); } else { // Object is not mapped, defer to the arguments. - FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); - MaybeObject* maybe_result = ElementsAccessor::ForArray(arguments)->Get( - receiver, obj, key, arguments); - Object* result; - if (!maybe_result->ToObject(&result)) return maybe_result; + Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)), + isolate); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + ElementsAccessor::ForArray(arguments)->Get( + receiver, obj, key, arguments), + Object); // Elements of the arguments object in slow mode might be slow aliases. 
if (result->IsAliasedArgumentsEntry()) { - AliasedArgumentsEntry* entry = AliasedArgumentsEntry::cast(result); + DisallowHeapAllocation no_gc; + AliasedArgumentsEntry* entry = AliasedArgumentsEntry::cast(*result); Context* context = Context::cast(parameter_map->get(0)); int context_index = entry->aliased_context_slot(); ASSERT(!context->get(context_index)->IsTheHole()); - return context->get(context_index); + return handle(context->get(context_index), isolate); } else { return result; } @@ -1702,57 +1664,57 @@ class SloppyArgumentsElementsAccessor : public ElementsAccessorBase< } MUST_USE_RESULT static PropertyAttributes GetAttributesImpl( - Object* receiver, - JSObject* obj, + Handle<Object> receiver, + Handle<JSObject> obj, uint32_t key, - FixedArrayBase* backing_store) { - FixedArray* parameter_map = FixedArray::cast(backing_store); - Object* probe = GetParameterMapArg(obj, parameter_map, key); + Handle<FixedArrayBase> backing_store) { + Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(backing_store); + Handle<Object> probe = GetParameterMapArg(obj, parameter_map, key); if (!probe->IsTheHole()) { return NONE; } else { // If not aliased, check the arguments. - FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); + Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1))); return ElementsAccessor::ForArray(arguments)->GetAttributes( receiver, obj, key, arguments); } } MUST_USE_RESULT static PropertyType GetTypeImpl( - Object* receiver, - JSObject* obj, + Handle<Object> receiver, + Handle<JSObject> obj, uint32_t key, - FixedArrayBase* parameters) { - FixedArray* parameter_map = FixedArray::cast(parameters); - Object* probe = GetParameterMapArg(obj, parameter_map, key); + Handle<FixedArrayBase> parameters) { + Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(parameters); + Handle<Object> probe = GetParameterMapArg(obj, parameter_map, key); if (!probe->IsTheHole()) { return FIELD; } else { // If not aliased, check the arguments. - FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); + Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1))); return ElementsAccessor::ForArray(arguments)->GetType( receiver, obj, key, arguments); } } - MUST_USE_RESULT static AccessorPair* GetAccessorPairImpl( - Object* receiver, - JSObject* obj, + MUST_USE_RESULT static MaybeHandle<AccessorPair> GetAccessorPairImpl( + Handle<Object> receiver, + Handle<JSObject> obj, uint32_t key, - FixedArrayBase* parameters) { - FixedArray* parameter_map = FixedArray::cast(parameters); - Object* probe = GetParameterMapArg(obj, parameter_map, key); + Handle<FixedArrayBase> parameters) { + Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(parameters); + Handle<Object> probe = GetParameterMapArg(obj, parameter_map, key); if (!probe->IsTheHole()) { - return NULL; + return MaybeHandle<AccessorPair>(); } else { // If not aliased, check the arguments. 
- FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); + Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1))); return ElementsAccessor::ForArray(arguments)->GetAccessorPair( receiver, obj, key, arguments); } } - MUST_USE_RESULT static Handle<Object> SetLengthImpl( + MUST_USE_RESULT static MaybeHandle<Object> SetLengthImpl( Handle<JSObject> obj, Handle<Object> length, Handle<FixedArrayBase> parameter_map) { @@ -1762,7 +1724,7 @@ class SloppyArgumentsElementsAccessor : public ElementsAccessorBase< return obj; } - MUST_USE_RESULT virtual Handle<Object> Delete( + MUST_USE_RESULT virtual MaybeHandle<Object> Delete( Handle<JSObject> obj, uint32_t key, JSReceiver::DeleteMode mode) V8_FINAL V8_OVERRIDE { @@ -1798,47 +1760,42 @@ class SloppyArgumentsElementsAccessor : public ElementsAccessorBase< UNREACHABLE(); } - static uint32_t GetCapacityImpl(FixedArrayBase* backing_store) { - FixedArray* parameter_map = FixedArray::cast(backing_store); - FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1)); + static uint32_t GetCapacityImpl(Handle<FixedArrayBase> backing_store) { + Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(backing_store); + Handle<FixedArrayBase> arguments( + FixedArrayBase::cast(parameter_map->get(1))); return Max(static_cast<uint32_t>(parameter_map->length() - 2), ForArray(arguments)->GetCapacity(arguments)); } - static uint32_t GetKeyForIndexImpl(FixedArrayBase* dict, + static uint32_t GetKeyForIndexImpl(Handle<FixedArrayBase> dict, uint32_t index) { return index; } - static bool HasElementImpl(Object* receiver, - JSObject* holder, + static bool HasElementImpl(Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* parameters) { - FixedArray* parameter_map = FixedArray::cast(parameters); - Object* probe = GetParameterMapArg(holder, parameter_map, key); + Handle<FixedArrayBase> parameters) { + Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(parameters); + Handle<Object> probe = GetParameterMapArg(holder, parameter_map, key); if (!probe->IsTheHole()) { return true; } else { - FixedArrayBase* arguments = - FixedArrayBase::cast(FixedArray::cast(parameter_map)->get(1)); + Isolate* isolate = holder->GetIsolate(); + Handle<FixedArrayBase> arguments(FixedArrayBase::cast( + Handle<FixedArray>::cast(parameter_map)->get(1)), isolate); ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments); - return !accessor->Get(receiver, holder, key, arguments)->IsTheHole(); + Handle<Object> value; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, value, + accessor->Get(receiver, holder, key, arguments), + false); + return !value->IsTheHole(); } } private: - // TODO(ishell): remove when all usages are handlified. - static Object* GetParameterMapArg(JSObject* holder, - FixedArray* parameter_map, - uint32_t key) { - uint32_t length = holder->IsJSArray() - ? Smi::cast(JSArray::cast(holder)->length())->value() - : parameter_map->length(); - return key < (length - 2) - ? 
parameter_map->get(key + 2) - : parameter_map->GetHeap()->the_hole_value(); - } - static Handle<Object> GetParameterMapArg(Handle<JSObject> holder, Handle<FixedArray> parameter_map, uint32_t key) { @@ -1853,7 +1810,7 @@ class SloppyArgumentsElementsAccessor : public ElementsAccessorBase< }; -ElementsAccessor* ElementsAccessor::ForArray(FixedArrayBase* array) { +ElementsAccessor* ElementsAccessor::ForArray(Handle<FixedArrayBase> array) { return elements_accessors_[ElementsKindForArray(array)]; } @@ -1881,8 +1838,9 @@ void ElementsAccessor::TearDown() { template <typename ElementsAccessorSubclass, typename ElementsKindTraits> -MUST_USE_RESULT Handle<Object> ElementsAccessorBase<ElementsAccessorSubclass, - ElementsKindTraits>:: +MUST_USE_RESULT +MaybeHandle<Object> ElementsAccessorBase<ElementsAccessorSubclass, + ElementsKindTraits>:: SetLengthImpl(Handle<JSObject> obj, Handle<Object> length, Handle<FixedArrayBase> backing_store) { @@ -1890,14 +1848,15 @@ MUST_USE_RESULT Handle<Object> ElementsAccessorBase<ElementsAccessorSubclass, Handle<JSArray> array = Handle<JSArray>::cast(obj); // Fast case: The new length fits into a Smi. - Handle<Object> smi_length = Object::ToSmi(isolate, length); + Handle<Object> smi_length; - if (!smi_length.is_null() && smi_length->IsSmi()) { + if (Object::ToSmi(isolate, length).ToHandle(&smi_length) && + smi_length->IsSmi()) { const int value = Handle<Smi>::cast(smi_length)->value(); if (value >= 0) { Handle<Object> new_length = ElementsAccessorSubclass:: SetLengthWithoutNormalize(backing_store, array, smi_length, value); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, new_length, new_length); + ASSERT(!new_length.is_null()); // even though the proposed length was a smi, new_length could // still be a heap number because SetLengthWithoutNormalize doesn't @@ -1924,11 +1883,11 @@ MUST_USE_RESULT Handle<Object> ElementsAccessorBase<ElementsAccessorSubclass, if (length->ToArrayIndex(&value)) { Handle<SeededNumberDictionary> dictionary = JSObject::NormalizeElements(array); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, dictionary, dictionary); + ASSERT(!dictionary.is_null()); Handle<Object> new_length = DictionaryElementsAccessor:: SetLengthWithoutNormalize(dictionary, array, length, value); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, new_length, new_length); + ASSERT(!new_length.is_null()); ASSERT(new_length->IsNumber()); array->set_length(*new_length); @@ -1947,8 +1906,8 @@ MUST_USE_RESULT Handle<Object> ElementsAccessorBase<ElementsAccessorSubclass, } -Handle<Object> ArrayConstructInitializeElements(Handle<JSArray> array, - Arguments* args) { +MaybeHandle<Object> ArrayConstructInitializeElements(Handle<JSArray> array, + Arguments* args) { // Optimize the case where there is one argument and the argument is a // small smi. if (args->length() == 1) { diff --git a/deps/v8/src/elements.h b/deps/v8/src/elements.h index 44644abd9..88f5db8c5 100644 --- a/deps/v8/src/elements.h +++ b/deps/v8/src/elements.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
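Every elements.cc hunk above follows the same migration: a raw MaybeObject* return checked with ->To(&value) becomes a MaybeHandle<T> consumed through ASSIGN_RETURN_ON_EXCEPTION (or ToHandle at the outermost caller). A minimal caller-side sketch of that protocol as it reads after this patch; the wrapper name GetElementSketch and its use of JSObject::GetElementsAccessor() are illustrative context, not lines from this commit:

// Sketch only: shows the caller-side shape of the new MaybeHandle protocol.
MaybeHandle<Object> GetElementSketch(Handle<Object> receiver,
                                     Handle<JSObject> holder,
                                     uint32_t key) {
  Isolate* isolate = holder->GetIsolate();
  Handle<Object> value;
  // On exception the macro returns MaybeHandle<Object>() from the enclosing
  // function, replacing the old "if (!maybe_value->To(&value)) return ..." check.
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, value,
      holder->GetElementsAccessor()->Get(receiver, holder, key),
      Object);
  return value;
}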
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ELEMENTS_H_ #define V8_ELEMENTS_H_ @@ -48,35 +25,43 @@ class ElementsAccessor { // Checks the elements of an object for consistency, asserting when a problem // is found. - virtual void Validate(JSObject* obj) = 0; + virtual void Validate(Handle<JSObject> obj) = 0; // Returns true if a holder contains an element with the specified key // without iterating up the prototype chain. The caller can optionally pass // in the backing store to use for the check, which must be compatible with // the ElementsKind of the ElementsAccessor. If backing_store is NULL, the // holder->elements() is used as the backing store. - virtual bool HasElement(Object* receiver, - JSObject* holder, - uint32_t key, - FixedArrayBase* backing_store = NULL) = 0; + virtual bool HasElement( + Handle<Object> receiver, + Handle<JSObject> holder, + uint32_t key, + Handle<FixedArrayBase> backing_store) = 0; + + inline bool HasElement( + Handle<Object> receiver, + Handle<JSObject> holder, + uint32_t key) { + return HasElement(receiver, holder, key, handle(holder->elements())); + } // Returns the element with the specified key or undefined if there is no such // element. This method doesn't iterate up the prototype chain. The caller // can optionally pass in the backing store to use for the check, which must // be compatible with the ElementsKind of the ElementsAccessor. If // backing_store is NULL, the holder->elements() is used as the backing store. - MUST_USE_RESULT virtual Handle<Object> Get( + MUST_USE_RESULT virtual MaybeHandle<Object> Get( Handle<Object> receiver, Handle<JSObject> holder, uint32_t key, - Handle<FixedArrayBase> backing_store = - Handle<FixedArrayBase>::null()) = 0; + Handle<FixedArrayBase> backing_store) = 0; - MUST_USE_RESULT virtual MaybeObject* Get( - Object* receiver, - JSObject* holder, - uint32_t key, - FixedArrayBase* backing_store = NULL) = 0; + MUST_USE_RESULT inline MaybeHandle<Object> Get( + Handle<Object> receiver, + Handle<JSObject> holder, + uint32_t key) { + return Get(receiver, holder, key, handle(holder->elements())); + } // Returns an element's attributes, or ABSENT if there is no such // element. This method doesn't iterate up the prototype chain. The caller @@ -84,10 +69,17 @@ class ElementsAccessor { // be compatible with the ElementsKind of the ElementsAccessor. If // backing_store is NULL, the holder->elements() is used as the backing store. 
MUST_USE_RESULT virtual PropertyAttributes GetAttributes( - Object* receiver, - JSObject* holder, + Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* backing_store = NULL) = 0; + Handle<FixedArrayBase> backing_store) = 0; + + MUST_USE_RESULT inline PropertyAttributes GetAttributes( + Handle<Object> receiver, + Handle<JSObject> holder, + uint32_t key) { + return GetAttributes(receiver, holder, key, handle(holder->elements())); + } // Returns an element's type, or NONEXISTENT if there is no such // element. This method doesn't iterate up the prototype chain. The caller @@ -95,28 +87,42 @@ class ElementsAccessor { // be compatible with the ElementsKind of the ElementsAccessor. If // backing_store is NULL, the holder->elements() is used as the backing store. MUST_USE_RESULT virtual PropertyType GetType( - Object* receiver, - JSObject* holder, + Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* backing_store = NULL) = 0; + Handle<FixedArrayBase> backing_store) = 0; + + MUST_USE_RESULT inline PropertyType GetType( + Handle<Object> receiver, + Handle<JSObject> holder, + uint32_t key) { + return GetType(receiver, holder, key, handle(holder->elements())); + } // Returns an element's accessors, or NULL if the element does not exist or // is plain. This method doesn't iterate up the prototype chain. The caller // can optionally pass in the backing store to use for the check, which must // be compatible with the ElementsKind of the ElementsAccessor. If // backing_store is NULL, the holder->elements() is used as the backing store. - MUST_USE_RESULT virtual AccessorPair* GetAccessorPair( - Object* receiver, - JSObject* holder, + MUST_USE_RESULT virtual MaybeHandle<AccessorPair> GetAccessorPair( + Handle<Object> receiver, + Handle<JSObject> holder, uint32_t key, - FixedArrayBase* backing_store = NULL) = 0; + Handle<FixedArrayBase> backing_store) = 0; + + MUST_USE_RESULT inline MaybeHandle<AccessorPair> GetAccessorPair( + Handle<Object> receiver, + Handle<JSObject> holder, + uint32_t key) { + return GetAccessorPair(receiver, holder, key, handle(holder->elements())); + } // Modifies the length data property as specified for JSArrays and resizes the // underlying backing store accordingly. The method honors the semantics of // changing array sizes as defined in EcmaScript 5.1 15.4.5.2, i.e. array that // have non-deletable elements can only be shrunk to the size of highest // element that is non-deletable. - MUST_USE_RESULT virtual Handle<Object> SetLength( + MUST_USE_RESULT virtual MaybeHandle<Object> SetLength( Handle<JSArray> holder, Handle<Object> new_length) = 0; @@ -132,7 +138,7 @@ class ElementsAccessor { int length) = 0; // Deletes an element in an object, returning a new elements backing store. - MUST_USE_RESULT virtual Handle<Object> Delete( + MUST_USE_RESULT virtual MaybeHandle<Object> Delete( Handle<JSObject> holder, uint32_t key, JSReceiver::DeleteMode mode) = 0; @@ -151,28 +157,45 @@ class ElementsAccessor { // store is available, it can be passed in source and source_holder is // ignored. virtual void CopyElements( - Handle<JSObject> source_holder, + Handle<FixedArrayBase> source, + uint32_t source_start, + ElementsKind source_kind, + Handle<FixedArrayBase> destination, + uint32_t destination_start, + int copy_size) = 0; + + // TODO(ishell): Keeping |source_holder| parameter in a non-handlified form + // helps avoiding ArrayConcat() builtin performance degradation. + // Revisit this later. 
+ virtual void CopyElements( + JSObject* source_holder, uint32_t source_start, ElementsKind source_kind, Handle<FixedArrayBase> destination, uint32_t destination_start, - int copy_size, - Handle<FixedArrayBase> source = Handle<FixedArrayBase>::null()) = 0; + int copy_size) = 0; - void CopyElements( + inline void CopyElements( Handle<JSObject> from_holder, Handle<FixedArrayBase> to, - ElementsKind from_kind, - Handle<FixedArrayBase> from = Handle<FixedArrayBase>::null()) { - CopyElements(from_holder, 0, from_kind, to, 0, - kCopyToEndAndInitializeToHole, from); + ElementsKind from_kind) { + CopyElements( + *from_holder, 0, from_kind, to, 0, kCopyToEndAndInitializeToHole); } - MUST_USE_RESULT virtual MaybeObject* AddElementsToFixedArray( - Object* receiver, - JSObject* holder, - FixedArray* to, - FixedArrayBase* from = NULL) = 0; + MUST_USE_RESULT virtual MaybeHandle<FixedArray> AddElementsToFixedArray( + Handle<Object> receiver, + Handle<JSObject> holder, + Handle<FixedArray> to, + Handle<FixedArrayBase> from) = 0; + + MUST_USE_RESULT inline MaybeHandle<FixedArray> AddElementsToFixedArray( + Handle<Object> receiver, + Handle<JSObject> holder, + Handle<FixedArray> to) { + return AddElementsToFixedArray( + receiver, holder, to, handle(holder->elements())); + } // Returns a shared ElementsAccessor for the specified ElementsKind. static ElementsAccessor* ForKind(ElementsKind elements_kind) { @@ -180,7 +203,7 @@ class ElementsAccessor { return elements_accessors_[elements_kind]; } - static ElementsAccessor* ForArray(FixedArrayBase* array); + static ElementsAccessor* ForArray(Handle<FixedArrayBase> array); static void InitializeOncePerProcess(); static void TearDown(); @@ -188,7 +211,7 @@ class ElementsAccessor { protected: friend class SloppyArgumentsElementsAccessor; - virtual uint32_t GetCapacity(FixedArrayBase* backing_store) = 0; + virtual uint32_t GetCapacity(Handle<FixedArrayBase> backing_store) = 0; // Element handlers distinguish between indexes and keys when they manipulate // elements. Indexes refer to elements in terms of their location in the @@ -198,7 +221,7 @@ class ElementsAccessor { // keys are equivalent to indexes, and GetKeyForIndex returns the same value // it is passed. In the NumberDictionary ElementsAccessor, GetKeyForIndex maps // the index to a key using the KeyAt method on the NumberDictionary. - virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store, + virtual uint32_t GetKeyForIndex(Handle<FixedArrayBase> backing_store, uint32_t index) = 0; private: @@ -208,11 +231,12 @@ class ElementsAccessor { DISALLOW_COPY_AND_ASSIGN(ElementsAccessor); }; -void CheckArrayAbuse(JSObject* obj, const char* op, uint32_t key, +void CheckArrayAbuse(Handle<JSObject> obj, const char* op, uint32_t key, bool allow_appending = false); -Handle<Object> ArrayConstructInitializeElements(Handle<JSArray> array, - Arguments* args); +MUST_USE_RESULT MaybeHandle<Object> ArrayConstructInitializeElements( + Handle<JSArray> array, + Arguments* args); } } // namespace v8::internal diff --git a/deps/v8/src/execution.cc b/deps/v8/src/execution.cc index 7442d1732..b74ef4d62 100644 --- a/deps/v8/src/execution.cc +++ b/deps/v8/src/execution.cc @@ -1,49 +1,18 @@ -// Copyright 2012 the V8 project authors. All rights reserved. 
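In the elements.h hunk above, the NULL-defaulted FixedArrayBase* parameters are replaced by explicit Handle<FixedArrayBase> parameters plus inline overloads that supply handle(holder->elements()). A short caller-side sketch of the two equivalent forms under the new interface; receiver, holder, key and isolate are assumed to be in scope and are not taken from this commit:

// Convenience overload added by this patch: the backing store defaults to
// the holder's own elements.
ElementsAccessor* accessor = holder->GetElementsAccessor();
bool has_key = accessor->HasElement(receiver, holder, key);

// Explicit form: pass the backing store yourself, now as a Handle instead
// of a raw FixedArrayBase* that could previously default to NULL.
Handle<FixedArrayBase> backing_store(holder->elements(), isolate);
bool has_key_explicit =
    accessor->HasElement(receiver, holder, key, backing_store);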
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include <stdlib.h> - -#include "v8.h" - -#include "api.h" +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "execution.h" + #include "bootstrapper.h" #include "codegen.h" -#include "debug.h" #include "deoptimizer.h" #include "isolate-inl.h" -#include "runtime-profiler.h" -#include "simulator.h" -#include "v8threads.h" #include "vm-state-inl.h" namespace v8 { namespace internal { - StackGuard::StackGuard() : isolate_(NULL) { } @@ -67,12 +36,12 @@ void StackGuard::reset_limits(const ExecutionAccess& lock) { } -static Handle<Object> Invoke(bool is_construct, - Handle<JSFunction> function, - Handle<Object> receiver, - int argc, - Handle<Object> args[], - bool* has_pending_exception) { +MUST_USE_RESULT static MaybeHandle<Object> Invoke( + bool is_construct, + Handle<JSFunction> function, + Handle<Object> receiver, + int argc, + Handle<Object> args[]) { Isolate* isolate = function->GetIsolate(); // Entering JavaScript. @@ -80,13 +49,12 @@ static Handle<Object> Invoke(bool is_construct, CHECK(AllowJavascriptExecution::IsAllowed(isolate)); if (!ThrowOnJavascriptExecution::IsAllowed(isolate)) { isolate->ThrowIllegalOperation(); - *has_pending_exception = true; isolate->ReportPendingMessages(); - return Handle<Object>(); + return MaybeHandle<Object>(); } // Placeholder for return value. - MaybeObject* value = reinterpret_cast<Object*>(kZapValue); + Object* value = NULL; typedef Object* (*JSEntryFunction)(byte* entry, Object* function, @@ -127,41 +95,36 @@ static Handle<Object> Invoke(bool is_construct, } #ifdef VERIFY_HEAP - value->Verify(); + value->ObjectVerify(); #endif // Update the pending exception flag and return the value. 
- *has_pending_exception = value->IsException(); - ASSERT(*has_pending_exception == isolate->has_pending_exception()); - if (*has_pending_exception) { + bool has_exception = value->IsException(); + ASSERT(has_exception == isolate->has_pending_exception()); + if (has_exception) { isolate->ReportPendingMessages(); -#ifdef ENABLE_DEBUGGER_SUPPORT // Reset stepping state when script exits with uncaught exception. if (isolate->debugger()->IsDebuggerActive()) { isolate->debug()->ClearStepping(); } -#endif // ENABLE_DEBUGGER_SUPPORT - return Handle<Object>(); + return MaybeHandle<Object>(); } else { isolate->clear_pending_message(); } - return Handle<Object>(value->ToObjectUnchecked(), isolate); + return Handle<Object>(value, isolate); } -Handle<Object> Execution::Call(Isolate* isolate, - Handle<Object> callable, - Handle<Object> receiver, - int argc, - Handle<Object> argv[], - bool* pending_exception, - bool convert_receiver) { - *pending_exception = false; - +MaybeHandle<Object> Execution::Call(Isolate* isolate, + Handle<Object> callable, + Handle<Object> receiver, + int argc, + Handle<Object> argv[], + bool convert_receiver) { if (!callable->IsJSFunction()) { - callable = TryGetFunctionDelegate(isolate, callable, pending_exception); - if (*pending_exception) return callable; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, callable, TryGetFunctionDelegate(isolate, callable), Object); } Handle<JSFunction> func = Handle<JSFunction>::cast(callable); @@ -178,29 +141,27 @@ Handle<Object> Execution::Call(Isolate* isolate, receiver = Handle<Object>(global, func->GetIsolate()); } } else { - receiver = ToObject(isolate, receiver, pending_exception); + ASSIGN_RETURN_ON_EXCEPTION( + isolate, receiver, ToObject(isolate, receiver), Object); } - if (*pending_exception) return callable; } - return Invoke(false, func, receiver, argc, argv, pending_exception); + return Invoke(false, func, receiver, argc, argv); } -Handle<Object> Execution::New(Handle<JSFunction> func, - int argc, - Handle<Object> argv[], - bool* pending_exception) { - return Invoke(true, func, func->GetIsolate()->global_object(), argc, argv, - pending_exception); +MaybeHandle<Object> Execution::New(Handle<JSFunction> func, + int argc, + Handle<Object> argv[]) { + return Invoke(true, func, func->GetIsolate()->global_object(), argc, argv); } -Handle<Object> Execution::TryCall(Handle<JSFunction> func, - Handle<Object> receiver, - int argc, - Handle<Object> args[], - bool* caught_exception) { +MaybeHandle<Object> Execution::TryCall(Handle<JSFunction> func, + Handle<Object> receiver, + int argc, + Handle<Object> args[], + Handle<Object>* exception_out) { // Enter a try-block while executing the JavaScript code. To avoid // duplicate error printing it must be non-verbose. Also, to avoid // creating message objects during stack overflow we shouldn't @@ -208,30 +169,30 @@ Handle<Object> Execution::TryCall(Handle<JSFunction> func, v8::TryCatch catcher; catcher.SetVerbose(false); catcher.SetCaptureMessage(false); - *caught_exception = false; // Get isolate now, because handle might be persistent // and get destroyed in the next call. 
Isolate* isolate = func->GetIsolate(); - Handle<Object> result = Invoke(false, func, receiver, argc, args, - caught_exception); + MaybeHandle<Object> maybe_result = Invoke(false, func, receiver, argc, args); - if (*caught_exception) { + if (maybe_result.is_null()) { ASSERT(catcher.HasCaught()); ASSERT(isolate->has_pending_exception()); ASSERT(isolate->external_caught_exception()); - if (isolate->pending_exception() == - isolate->heap()->termination_exception()) { - result = isolate->factory()->termination_exception(); - } else { - result = v8::Utils::OpenHandle(*catcher.Exception()); + if (exception_out != NULL) { + if (isolate->pending_exception() == + isolate->heap()->termination_exception()) { + *exception_out = isolate->factory()->termination_exception(); + } else { + *exception_out = v8::Utils::OpenHandle(*catcher.Exception()); + } } isolate->OptionalRescheduleException(true); } ASSERT(!isolate->has_pending_exception()); ASSERT(!isolate->external_caught_exception()); - return result; + return maybe_result; } @@ -262,9 +223,8 @@ Handle<Object> Execution::GetFunctionDelegate(Isolate* isolate, } -Handle<Object> Execution::TryGetFunctionDelegate(Isolate* isolate, - Handle<Object> object, - bool* has_pending_exception) { +MaybeHandle<Object> Execution::TryGetFunctionDelegate(Isolate* isolate, + Handle<Object> object) { ASSERT(!object->IsJSFunction()); // If object is a function proxy, get its handler. Iterate if necessary. @@ -286,10 +246,8 @@ Handle<Object> Execution::TryGetFunctionDelegate(Isolate* isolate, // throw a non-callable exception. i::Handle<i::Object> error_obj = isolate->factory()->NewTypeError( "called_non_callable", i::HandleVector<i::Object>(&object, 1)); - isolate->Throw(*error_obj); - *has_pending_exception = true; - return isolate->factory()->undefined_value(); + return isolate->Throw<Object>(error_obj); } @@ -319,10 +277,8 @@ Handle<Object> Execution::GetConstructorDelegate(Isolate* isolate, } -Handle<Object> Execution::TryGetConstructorDelegate( - Isolate* isolate, - Handle<Object> object, - bool* has_pending_exception) { +MaybeHandle<Object> Execution::TryGetConstructorDelegate( + Isolate* isolate, Handle<Object> object) { ASSERT(!object->IsJSFunction()); // If you return a function from here, it will be called when an @@ -347,38 +303,29 @@ Handle<Object> Execution::TryGetConstructorDelegate( // throw a non-callable exception. 
i::Handle<i::Object> error_obj = isolate->factory()->NewTypeError( "called_non_callable", i::HandleVector<i::Object>(&object, 1)); - isolate->Throw(*error_obj); - *has_pending_exception = true; - - return isolate->factory()->undefined_value(); + return isolate->Throw<Object>(error_obj); } void Execution::RunMicrotasks(Isolate* isolate) { ASSERT(isolate->microtask_pending()); - bool threw = false; Execution::Call( isolate, isolate->run_microtasks(), isolate->factory()->undefined_value(), 0, - NULL, - &threw); - ASSERT(!threw); + NULL).Check(); } void Execution::EnqueueMicrotask(Isolate* isolate, Handle<Object> microtask) { - bool threw = false; Handle<Object> args[] = { microtask }; Execution::Call( isolate, - isolate->enqueue_external_microtask(), + isolate->enqueue_microtask(), isolate->factory()->undefined_value(), 1, - args, - &threw); - ASSERT(!threw); + args).Check(); } @@ -529,7 +476,6 @@ void StackGuard::DeoptMarkedAllocationSites() { } -#ifdef ENABLE_DEBUGGER_SUPPORT bool StackGuard::IsDebugBreak() { ExecutionAccess access(isolate_); return thread_local_.interrupt_flags_ & DEBUGBREAK; @@ -550,13 +496,11 @@ bool StackGuard::IsDebugCommand() { void StackGuard::DebugCommand() { - if (FLAG_debugger_auto_break) { - ExecutionAccess access(isolate_); - thread_local_.interrupt_flags_ |= DEBUGCOMMAND; - set_interrupt_limits(access); - } + ExecutionAccess access(isolate_); + thread_local_.interrupt_flags_ |= DEBUGCOMMAND; + set_interrupt_limits(access); } -#endif + void StackGuard::Continue(InterruptFlag after_what) { ExecutionAccess access(isolate_); @@ -698,78 +642,78 @@ void StackGuard::InitThread(const ExecutionAccess& lock) { // --- C a l l s t o n a t i v e s --- -#define RETURN_NATIVE_CALL(name, args, has_pending_exception) \ +#define RETURN_NATIVE_CALL(name, args) \ do { \ Handle<Object> argv[] = args; \ - ASSERT(has_pending_exception != NULL); \ return Call(isolate, \ isolate->name##_fun(), \ isolate->js_builtins_object(), \ - ARRAY_SIZE(argv), argv, \ - has_pending_exception); \ + ARRAY_SIZE(argv), argv); \ } while (false) -Handle<Object> Execution::ToNumber( - Isolate* isolate, Handle<Object> obj, bool* exc) { - RETURN_NATIVE_CALL(to_number, { obj }, exc); +MaybeHandle<Object> Execution::ToNumber( + Isolate* isolate, Handle<Object> obj) { + RETURN_NATIVE_CALL(to_number, { obj }); } -Handle<Object> Execution::ToString( - Isolate* isolate, Handle<Object> obj, bool* exc) { - RETURN_NATIVE_CALL(to_string, { obj }, exc); +MaybeHandle<Object> Execution::ToString( + Isolate* isolate, Handle<Object> obj) { + RETURN_NATIVE_CALL(to_string, { obj }); } -Handle<Object> Execution::ToDetailString( - Isolate* isolate, Handle<Object> obj, bool* exc) { - RETURN_NATIVE_CALL(to_detail_string, { obj }, exc); +MaybeHandle<Object> Execution::ToDetailString( + Isolate* isolate, Handle<Object> obj) { + RETURN_NATIVE_CALL(to_detail_string, { obj }); } -Handle<Object> Execution::ToObject( - Isolate* isolate, Handle<Object> obj, bool* exc) { +MaybeHandle<Object> Execution::ToObject( + Isolate* isolate, Handle<Object> obj) { if (obj->IsSpecObject()) return obj; - RETURN_NATIVE_CALL(to_object, { obj }, exc); + RETURN_NATIVE_CALL(to_object, { obj }); } -Handle<Object> Execution::ToInteger( - Isolate* isolate, Handle<Object> obj, bool* exc) { - RETURN_NATIVE_CALL(to_integer, { obj }, exc); +MaybeHandle<Object> Execution::ToInteger( + Isolate* isolate, Handle<Object> obj) { + RETURN_NATIVE_CALL(to_integer, { obj }); } -Handle<Object> Execution::ToUint32( - Isolate* isolate, Handle<Object> obj, bool* exc) { - 
RETURN_NATIVE_CALL(to_uint32, { obj }, exc); +MaybeHandle<Object> Execution::ToUint32( + Isolate* isolate, Handle<Object> obj) { + RETURN_NATIVE_CALL(to_uint32, { obj }); } -Handle<Object> Execution::ToInt32( - Isolate* isolate, Handle<Object> obj, bool* exc) { - RETURN_NATIVE_CALL(to_int32, { obj }, exc); +MaybeHandle<Object> Execution::ToInt32( + Isolate* isolate, Handle<Object> obj) { + RETURN_NATIVE_CALL(to_int32, { obj }); } -Handle<Object> Execution::NewDate(Isolate* isolate, double time, bool* exc) { +MaybeHandle<Object> Execution::NewDate(Isolate* isolate, double time) { Handle<Object> time_obj = isolate->factory()->NewNumber(time); - RETURN_NATIVE_CALL(create_date, { time_obj }, exc); + RETURN_NATIVE_CALL(create_date, { time_obj }); } #undef RETURN_NATIVE_CALL -Handle<JSRegExp> Execution::NewJSRegExp(Handle<String> pattern, - Handle<String> flags, - bool* exc) { +MaybeHandle<JSRegExp> Execution::NewJSRegExp(Handle<String> pattern, + Handle<String> flags) { + Isolate* isolate = pattern->GetIsolate(); Handle<JSFunction> function = Handle<JSFunction>( - pattern->GetIsolate()->native_context()->regexp_function()); - Handle<Object> re_obj = RegExpImpl::CreateRegExpLiteral( - function, pattern, flags, exc); - if (*exc) return Handle<JSRegExp>(); + isolate->native_context()->regexp_function()); + Handle<Object> re_obj; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, re_obj, + RegExpImpl::CreateRegExpLiteral(function, pattern, flags), + JSRegExp); return Handle<JSRegExp>::cast(re_obj); } @@ -783,97 +727,90 @@ Handle<Object> Execution::CharAt(Handle<String> string, uint32_t index) { return factory->undefined_value(); } - Handle<Object> char_at = GetProperty( - isolate, isolate->js_builtins_object(), factory->char_at_string()); + Handle<Object> char_at = Object::GetProperty( + isolate->js_builtins_object(), + factory->char_at_string()).ToHandleChecked(); if (!char_at->IsJSFunction()) { return factory->undefined_value(); } - bool caught_exception; Handle<Object> index_object = factory->NewNumberFromInt(int_index); Handle<Object> index_arg[] = { index_object }; - Handle<Object> result = TryCall(Handle<JSFunction>::cast(char_at), - string, - ARRAY_SIZE(index_arg), - index_arg, - &caught_exception); - if (caught_exception) { + Handle<Object> result; + if (!TryCall(Handle<JSFunction>::cast(char_at), + string, + ARRAY_SIZE(index_arg), + index_arg).ToHandle(&result)) { return factory->undefined_value(); } return result; } -Handle<JSFunction> Execution::InstantiateFunction( - Handle<FunctionTemplateInfo> data, - bool* exc) { +MaybeHandle<JSFunction> Execution::InstantiateFunction( + Handle<FunctionTemplateInfo> data) { Isolate* isolate = data->GetIsolate(); if (!data->do_not_cache()) { // Fast case: see if the function has already been instantiated int serial_number = Smi::cast(data->serial_number())->value(); Handle<JSObject> cache(isolate->native_context()->function_cache()); Handle<Object> elm = - Object::GetElementNoExceptionThrown(isolate, cache, serial_number); + Object::GetElement(isolate, cache, serial_number).ToHandleChecked(); if (elm->IsJSFunction()) return Handle<JSFunction>::cast(elm); } // The function has not yet been instantiated in this context; do it. 
Handle<Object> args[] = { data }; - Handle<Object> result = Call(isolate, - isolate->instantiate_fun(), - isolate->js_builtins_object(), - ARRAY_SIZE(args), - args, - exc); - if (*exc) return Handle<JSFunction>::null(); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + Call(isolate, + isolate->instantiate_fun(), + isolate->js_builtins_object(), + ARRAY_SIZE(args), + args), + JSFunction); return Handle<JSFunction>::cast(result); } -Handle<JSObject> Execution::InstantiateObject(Handle<ObjectTemplateInfo> data, - bool* exc) { +MaybeHandle<JSObject> Execution::InstantiateObject( + Handle<ObjectTemplateInfo> data) { Isolate* isolate = data->GetIsolate(); + Handle<Object> result; if (data->property_list()->IsUndefined() && !data->constructor()->IsUndefined()) { - // Initialization to make gcc happy. - Object* result = NULL; - { - HandleScope scope(isolate); - Handle<FunctionTemplateInfo> cons_template = - Handle<FunctionTemplateInfo>( - FunctionTemplateInfo::cast(data->constructor())); - Handle<JSFunction> cons = InstantiateFunction(cons_template, exc); - if (*exc) return Handle<JSObject>::null(); - Handle<Object> value = New(cons, 0, NULL, exc); - if (*exc) return Handle<JSObject>::null(); - result = *value; - } - ASSERT(!*exc); - return Handle<JSObject>(JSObject::cast(result)); + Handle<FunctionTemplateInfo> cons_template = + Handle<FunctionTemplateInfo>( + FunctionTemplateInfo::cast(data->constructor())); + Handle<JSFunction> cons; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, cons, InstantiateFunction(cons_template), JSObject); + ASSIGN_RETURN_ON_EXCEPTION(isolate, result, New(cons, 0, NULL), JSObject); } else { Handle<Object> args[] = { data }; - Handle<Object> result = Call(isolate, - isolate->instantiate_fun(), - isolate->js_builtins_object(), - ARRAY_SIZE(args), - args, - exc); - if (*exc) return Handle<JSObject>::null(); - return Handle<JSObject>::cast(result); + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + Call(isolate, + isolate->instantiate_fun(), + isolate->js_builtins_object(), + ARRAY_SIZE(args), + args), + JSObject); } + return Handle<JSObject>::cast(result); } -void Execution::ConfigureInstance(Isolate* isolate, - Handle<Object> instance, - Handle<Object> instance_template, - bool* exc) { +MaybeHandle<Object> Execution::ConfigureInstance( + Isolate* isolate, + Handle<Object> instance, + Handle<Object> instance_template) { Handle<Object> args[] = { instance, instance_template }; - Execution::Call(isolate, - isolate->configure_instance_fun(), - isolate->js_builtins_object(), - ARRAY_SIZE(args), - args, - exc); + return Execution::Call(isolate, + isolate->configure_instance_fun(), + isolate->js_builtins_object(), + ARRAY_SIZE(args), + args); } @@ -883,14 +820,14 @@ Handle<String> Execution::GetStackTraceLine(Handle<Object> recv, Handle<Object> is_global) { Isolate* isolate = fun->GetIsolate(); Handle<Object> args[] = { recv, fun, pos, is_global }; - bool caught_exception; - Handle<Object> result = TryCall(isolate->get_stack_trace_line_fun(), - isolate->js_builtins_object(), - ARRAY_SIZE(args), - args, - &caught_exception); - if (caught_exception || !result->IsString()) { - return isolate->factory()->empty_string(); + MaybeHandle<Object> maybe_result = + TryCall(isolate->get_stack_trace_line_fun(), + isolate->js_builtins_object(), + ARRAY_SIZE(args), + args); + Handle<Object> result; + if (!maybe_result.ToHandle(&result) || !result->IsString()) { + return isolate->factory()->empty_string(); } return Handle<String>::cast(result); @@ -901,7 +838,6 @@ static 
Object* RuntimePreempt(Isolate* isolate) { // Clear the preempt request flag. isolate->stack_guard()->Continue(PREEMPT); -#ifdef ENABLE_DEBUGGER_SUPPORT if (isolate->debug()->InDebugger()) { // If currently in the debugger don't do any actual preemption but record // that preemption occoured while in the debugger. @@ -911,19 +847,11 @@ static Object* RuntimePreempt(Isolate* isolate) { v8::Unlocker unlocker(reinterpret_cast<v8::Isolate*>(isolate)); Thread::YieldCPU(); } -#else - { // NOLINT - // Perform preemption. - v8::Unlocker unlocker(reinterpret_cast<v8::Isolate*>(isolate)); - Thread::YieldCPU(); - } -#endif return isolate->heap()->undefined_value(); } -#ifdef ENABLE_DEBUGGER_SUPPORT Object* Execution::DebugBreakHelper(Isolate* isolate) { // Just continue if breaks are disabled. if (isolate->debug()->disable_break()) { @@ -1001,9 +929,7 @@ void Execution::ProcessDebugMessages(Isolate* isolate, } -#endif - -MaybeObject* Execution::HandleStackGuardInterrupt(Isolate* isolate) { +Object* Execution::HandleStackGuardInterrupt(Isolate* isolate) { StackGuard* stack_guard = isolate->stack_guard(); if (stack_guard->ShouldPostponeInterrupts()) { return isolate->heap()->undefined_value(); @@ -1022,11 +948,9 @@ MaybeObject* Execution::HandleStackGuardInterrupt(Isolate* isolate) { isolate->counters()->stack_interrupts()->Increment(); isolate->counters()->runtime_profiler_ticks()->Increment(); -#ifdef ENABLE_DEBUGGER_SUPPORT if (stack_guard->IsDebugBreak() || stack_guard->IsDebugCommand()) { DebugBreakHelper(isolate); } -#endif if (stack_guard->IsPreempted()) RuntimePreempt(isolate); if (stack_guard->IsTerminateExecution()) { stack_guard->Continue(TERMINATE); @@ -1053,5 +977,4 @@ MaybeObject* Execution::HandleStackGuardInterrupt(Isolate* isolate) { return isolate->heap()->undefined_value(); } - } } // namespace v8::internal diff --git a/deps/v8/src/execution.h b/deps/v8/src/execution.h index 592ecbdb6..c165faba0 100644 --- a/deps/v8/src/execution.h +++ b/deps/v8/src/execution.h @@ -1,39 +1,15 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_EXECUTION_H_ #define V8_EXECUTION_H_ -#include "allocation.h" +#include "handles.h" namespace v8 { namespace internal { - // Flag used to set the interrupt causes. enum InterruptFlag { INTERRUPT = 1 << 0, @@ -49,10 +25,7 @@ enum InterruptFlag { }; -class Isolate; - - -class Execution : public AllStatic { +class Execution V8_FINAL : public AllStatic { public: // Call a function, the caller supplies a receiver and an array // of arguments. Arguments are Object* type. After function returns, @@ -65,13 +38,13 @@ class Execution : public AllStatic { // and the function called is not in strict mode, receiver is converted to // an object. // - static Handle<Object> Call(Isolate* isolate, - Handle<Object> callable, - Handle<Object> receiver, - int argc, - Handle<Object> argv[], - bool* pending_exception, - bool convert_receiver = false); + MUST_USE_RESULT static MaybeHandle<Object> Call( + Isolate* isolate, + Handle<Object> callable, + Handle<Object> receiver, + int argc, + Handle<Object> argv[], + bool convert_receiver = false); // Construct object from function, the caller supplies an array of // arguments. Arguments are Object* type. After function returns, @@ -80,99 +53,93 @@ class Execution : public AllStatic { // *pending_exception tells whether the invoke resulted in // a pending exception. // - static Handle<Object> New(Handle<JSFunction> func, - int argc, - Handle<Object> argv[], - bool* pending_exception); + MUST_USE_RESULT static MaybeHandle<Object> New(Handle<JSFunction> func, + int argc, + Handle<Object> argv[]); // Call a function, just like Call(), but make sure to silently catch // any thrown exceptions. The return value is either the result of // calling the function (if caught exception is false) or the exception // that occurred (if caught exception is true). 
- static Handle<Object> TryCall(Handle<JSFunction> func, - Handle<Object> receiver, - int argc, - Handle<Object> argv[], - bool* caught_exception); + static MaybeHandle<Object> TryCall( + Handle<JSFunction> func, + Handle<Object> receiver, + int argc, + Handle<Object> argv[], + Handle<Object>* exception_out = NULL); // ECMA-262 9.3 - static Handle<Object> ToNumber( - Isolate* isolate, Handle<Object> obj, bool* exc); + MUST_USE_RESULT static MaybeHandle<Object> ToNumber( + Isolate* isolate, Handle<Object> obj); // ECMA-262 9.4 - static Handle<Object> ToInteger( - Isolate* isolate, Handle<Object> obj, bool* exc); + MUST_USE_RESULT static MaybeHandle<Object> ToInteger( + Isolate* isolate, Handle<Object> obj); // ECMA-262 9.5 - static Handle<Object> ToInt32( - Isolate* isolate, Handle<Object> obj, bool* exc); + MUST_USE_RESULT static MaybeHandle<Object> ToInt32( + Isolate* isolate, Handle<Object> obj); // ECMA-262 9.6 - static Handle<Object> ToUint32( - Isolate* isolate, Handle<Object> obj, bool* exc); + MUST_USE_RESULT static MaybeHandle<Object> ToUint32( + Isolate* isolate, Handle<Object> obj); // ECMA-262 9.8 - static Handle<Object> ToString( - Isolate* isolate, Handle<Object> obj, bool* exc); + MUST_USE_RESULT static MaybeHandle<Object> ToString( + Isolate* isolate, Handle<Object> obj); // ECMA-262 9.8 - static Handle<Object> ToDetailString( - Isolate* isolate, Handle<Object> obj, bool* exc); + MUST_USE_RESULT static MaybeHandle<Object> ToDetailString( + Isolate* isolate, Handle<Object> obj); // ECMA-262 9.9 - static Handle<Object> ToObject( - Isolate* isolate, Handle<Object> obj, bool* exc); + MUST_USE_RESULT static MaybeHandle<Object> ToObject( + Isolate* isolate, Handle<Object> obj); // Create a new date object from 'time'. - static Handle<Object> NewDate( - Isolate* isolate, double time, bool* exc); + MUST_USE_RESULT static MaybeHandle<Object> NewDate( + Isolate* isolate, double time); // Create a new regular expression object from 'pattern' and 'flags'. - static Handle<JSRegExp> NewJSRegExp(Handle<String> pattern, - Handle<String> flags, - bool* exc); + MUST_USE_RESULT static MaybeHandle<JSRegExp> NewJSRegExp( + Handle<String> pattern, Handle<String> flags); // Used to implement [] notation on strings (calls JS code) static Handle<Object> CharAt(Handle<String> str, uint32_t index); static Handle<Object> GetFunctionFor(); - static Handle<JSFunction> InstantiateFunction( - Handle<FunctionTemplateInfo> data, bool* exc); - static Handle<JSObject> InstantiateObject(Handle<ObjectTemplateInfo> data, - bool* exc); - static void ConfigureInstance(Isolate* isolate, - Handle<Object> instance, - Handle<Object> data, - bool* exc); + MUST_USE_RESULT static MaybeHandle<JSFunction> InstantiateFunction( + Handle<FunctionTemplateInfo> data); + MUST_USE_RESULT static MaybeHandle<JSObject> InstantiateObject( + Handle<ObjectTemplateInfo> data); + MUST_USE_RESULT static MaybeHandle<Object> ConfigureInstance( + Isolate* isolate, Handle<Object> instance, Handle<Object> data); static Handle<String> GetStackTraceLine(Handle<Object> recv, Handle<JSFunction> fun, Handle<Object> pos, Handle<Object> is_global); -#ifdef ENABLE_DEBUGGER_SUPPORT + static Object* DebugBreakHelper(Isolate* isolate); static void ProcessDebugMessages(Isolate* isolate, bool debug_command_only); -#endif // If the stack guard is triggered, but it is not an actual // stack overflow, then handle the interruption accordingly. 
- MUST_USE_RESULT static MaybeObject* HandleStackGuardInterrupt( - Isolate* isolate); + static Object* HandleStackGuardInterrupt(Isolate* isolate); // Get a function delegate (or undefined) for the given non-function // object. Used for support calling objects as functions. static Handle<Object> GetFunctionDelegate(Isolate* isolate, Handle<Object> object); - static Handle<Object> TryGetFunctionDelegate(Isolate* isolate, - Handle<Object> object, - bool* has_pending_exception); + MUST_USE_RESULT static MaybeHandle<Object> TryGetFunctionDelegate( + Isolate* isolate, + Handle<Object> object); // Get a function delegate (or undefined) for the given non-function // object. Used for support calling objects as constructors. static Handle<Object> GetConstructorDelegate(Isolate* isolate, Handle<Object> object); - static Handle<Object> TryGetConstructorDelegate(Isolate* isolate, - Handle<Object> object, - bool* has_pending_exception); + static MaybeHandle<Object> TryGetConstructorDelegate(Isolate* isolate, + Handle<Object> object); static void RunMicrotasks(Isolate* isolate); static void EnqueueMicrotask(Isolate* isolate, Handle<Object> microtask); @@ -185,7 +152,7 @@ class ExecutionAccess; // StackGuard contains the handling of the limits that are used to limit the // number of nested invocations of JavaScript and the stack size used in each // invocation. -class StackGuard { +class StackGuard V8_FINAL { public: // Pass the address beyond which the stack should not grow. The stack // is assumed to grow downwards. @@ -211,12 +178,10 @@ class StackGuard { bool IsTerminateExecution(); void TerminateExecution(); void CancelTerminateExecution(); -#ifdef ENABLE_DEBUGGER_SUPPORT bool IsDebugBreak(); void DebugBreak(); bool IsDebugCommand(); void DebugCommand(); -#endif bool IsGCRequest(); void RequestGC(); bool IsInstallCodeRequest(); @@ -290,7 +255,7 @@ class StackGuard { static const uintptr_t kIllegalLimit = 0xfffffff8; #endif - class ThreadLocal { + class ThreadLocal V8_FINAL { public: ThreadLocal() { Clear(); } // You should hold the ExecutionAccess lock when you call Initialize or @@ -334,7 +299,6 @@ class StackGuard { DISALLOW_COPY_AND_ASSIGN(StackGuard); }; - } } // namespace v8::internal #endif // V8_EXECUTION_H_ diff --git a/deps/v8/src/extensions/externalize-string-extension.cc b/deps/v8/src/extensions/externalize-string-extension.cc index adc5577d9..c942b613a 100644 --- a/deps/v8/src/extensions/externalize-string-extension.cc +++ b/deps/v8/src/extensions/externalize-string-extension.cc @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
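The execution.h hunks above replace the old out-parameter error convention (bool* pending_exception, bool* exc, bool* caught_exception) with MUST_USE_RESULT methods that return MaybeHandle<Object>. As a rough, self-contained sketch of what that shift means for a caller; the MaybeHandle template and both ToString functions below are toy stand-ins written for illustration, not V8's own types:

#include <cassert>
#include <string>

// Toy stand-in for a MaybeHandle<T>: either holds a value or is empty,
// and the caller must check before using it (illustration only).
template <typename T>
class MaybeHandle {
 public:
  MaybeHandle() : has_value_(false) {}
  explicit MaybeHandle(T value) : has_value_(true), value_(value) {}
  // Returns false when no value is present (the "pending exception" case).
  bool ToHandle(T* out) const {
    if (has_value_) *out = value_;
    return has_value_;
  }
 private:
  bool has_value_;
  T value_;
};

// Old style: result plus a bool out-parameter that signals the exception.
std::string ToStringOld(int v, bool* has_pending_exception) {
  *has_pending_exception = (v < 0);
  return *has_pending_exception ? std::string() : std::to_string(v);
}

// New style: a single MaybeHandle-like return value.
MaybeHandle<std::string> ToStringNew(int v) {
  if (v < 0) return MaybeHandle<std::string>();  // failure, nothing returned
  return MaybeHandle<std::string>(std::to_string(v));
}

int main() {
  bool pending = false;
  std::string s1 = ToStringOld(42, &pending);
  assert(!pending && s1 == "42");

  std::string s2;
  if (!ToStringNew(42).ToHandle(&s2)) return 1;  // propagate the failure
  assert(s2 == "42");
  return 0;
}

The practical difference is that the old bool out-parameter could be silently ignored, whereas a MUST_USE_RESULT return value forces every call site to decide what to do when no result was produced.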
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "externalize-string-extension.h" diff --git a/deps/v8/src/extensions/externalize-string-extension.h b/deps/v8/src/extensions/externalize-string-extension.h index d0cb5e47d..305c67dcd 100644 --- a/deps/v8/src/extensions/externalize-string-extension.h +++ b/deps/v8/src/extensions/externalize-string-extension.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_EXTENSIONS_EXTERNALIZE_STRING_EXTENSION_H_ #define V8_EXTENSIONS_EXTERNALIZE_STRING_EXTENSION_H_ diff --git a/deps/v8/src/extensions/free-buffer-extension.cc b/deps/v8/src/extensions/free-buffer-extension.cc index b4abaafe2..4ca0ab591 100644 --- a/deps/v8/src/extensions/free-buffer-extension.cc +++ b/deps/v8/src/extensions/free-buffer-extension.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "free-buffer-extension.h" #include "platform.h" diff --git a/deps/v8/src/extensions/free-buffer-extension.h b/deps/v8/src/extensions/free-buffer-extension.h index 26ff7d1bb..e1fc9fb86 100644 --- a/deps/v8/src/extensions/free-buffer-extension.h +++ b/deps/v8/src/extensions/free-buffer-extension.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_EXTENSIONS_FREE_BUFFER_EXTENSION_H_ #define V8_EXTENSIONS_FREE_BUFFER_EXTENSION_H_ diff --git a/deps/v8/src/extensions/gc-extension.cc b/deps/v8/src/extensions/gc-extension.cc index 1d4873de7..5a5884df9 100644 --- a/deps/v8/src/extensions/gc-extension.cc +++ b/deps/v8/src/extensions/gc-extension.cc @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "gc-extension.h" #include "platform.h" diff --git a/deps/v8/src/extensions/gc-extension.h b/deps/v8/src/extensions/gc-extension.h index 105c5ad5b..05f39354a 100644 --- a/deps/v8/src/extensions/gc-extension.h +++ b/deps/v8/src/extensions/gc-extension.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_EXTENSIONS_GC_EXTENSION_H_ #define V8_EXTENSIONS_GC_EXTENSION_H_ diff --git a/deps/v8/src/extensions/statistics-extension.cc b/deps/v8/src/extensions/statistics-extension.cc index e10dddfcd..a7770e589 100644 --- a/deps/v8/src/extensions/statistics-extension.cc +++ b/deps/v8/src/extensions/statistics-extension.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "statistics-extension.h" diff --git a/deps/v8/src/extensions/statistics-extension.h b/deps/v8/src/extensions/statistics-extension.h index 9e97b45dd..7eea82b06 100644 --- a/deps/v8/src/extensions/statistics-extension.h +++ b/deps/v8/src/extensions/statistics-extension.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_EXTENSIONS_STATISTICS_EXTENSION_H_ #define V8_EXTENSIONS_STATISTICS_EXTENSION_H_ diff --git a/deps/v8/src/extensions/trigger-failure-extension.cc b/deps/v8/src/extensions/trigger-failure-extension.cc index 83894b922..31a9818de 100644 --- a/deps/v8/src/extensions/trigger-failure-extension.cc +++ b/deps/v8/src/extensions/trigger-failure-extension.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "trigger-failure-extension.h" #include "v8.h" diff --git a/deps/v8/src/extensions/trigger-failure-extension.h b/deps/v8/src/extensions/trigger-failure-extension.h index 467b7d28b..f012b8b58 100644 --- a/deps/v8/src/extensions/trigger-failure-extension.h +++ b/deps/v8/src/extensions/trigger-failure-extension.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. 
All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_EXTENSIONS_TRIGGER_FAILURE_EXTENSION_H_ #define V8_EXTENSIONS_TRIGGER_FAILURE_EXTENSION_H_ diff --git a/deps/v8/src/factory.cc b/deps/v8/src/factory.cc index 0868db851..9abb015d8 100644 --- a/deps/v8/src/factory.cc +++ b/deps/v8/src/factory.cc @@ -1,52 +1,61 @@ -// Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -#include "v8.h" - -#include "api.h" -#include "debug.h" -#include "execution.h" +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + #include "factory.h" + +#include "conversions.h" #include "isolate-inl.h" #include "macro-assembler.h" -#include "objects.h" -#include "objects-visiting.h" -#include "platform.h" -#include "scopeinfo.h" namespace v8 { namespace internal { -Handle<Box> Factory::NewBox(Handle<Object> value, PretenureFlag pretenure) { +template<typename T> +Handle<T> Factory::New(Handle<Map> map, AllocationSpace space) { CALL_HEAP_FUNCTION( isolate(), - isolate()->heap()->AllocateBox(*value, pretenure), - Box); + isolate()->heap()->Allocate(*map, space), + T); +} + + +template<typename T> +Handle<T> Factory::New(Handle<Map> map, + AllocationSpace space, + Handle<AllocationSite> allocation_site) { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->Allocate(*map, space, *allocation_site), + T); +} + + +Handle<HeapObject> Factory::NewFillerObject(int size, + bool double_align, + AllocationSpace space) { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->AllocateFillerObject(size, double_align, space), + HeapObject); +} + + +Handle<Box> Factory::NewBox(Handle<Object> value) { + Handle<Box> result = Handle<Box>::cast(NewStruct(BOX_TYPE)); + result->set_value(*value); + return result; +} + + +Handle<Oddball> Factory::NewOddball(Handle<Map> map, + const char* to_string, + Handle<Object> to_number, + byte kind) { + Handle<Oddball> oddball = New<Oddball>(map, OLD_POINTER_SPACE); + Oddball::Initialize(isolate(), oddball, to_string, to_number, kind); + return oddball; } @@ -64,7 +73,9 @@ Handle<FixedArray> Factory::NewFixedArrayWithHoles(int size, ASSERT(0 <= size); CALL_HEAP_FUNCTION( isolate(), - isolate()->heap()->AllocateFixedArrayWithHoles(size, pretenure), + isolate()->heap()->AllocateFixedArrayWithFiller(size, + pretenure, + *the_hole_value()), FixedArray); } @@ -77,13 +88,29 @@ Handle<FixedArray> Factory::NewUninitializedFixedArray(int size) { } -Handle<FixedDoubleArray> Factory::NewFixedDoubleArray(int size, - PretenureFlag pretenure) { +Handle<FixedArrayBase> Factory::NewFixedDoubleArray(int size, + PretenureFlag pretenure) { ASSERT(0 <= size); CALL_HEAP_FUNCTION( isolate(), isolate()->heap()->AllocateUninitializedFixedDoubleArray(size, pretenure), - FixedDoubleArray); + FixedArrayBase); +} + + +Handle<FixedArrayBase> Factory::NewFixedDoubleArrayWithHoles( + int size, + PretenureFlag pretenure) { + ASSERT(0 <= size); + Handle<FixedArrayBase> array = NewFixedDoubleArray(size, pretenure); + if (size > 0) { + Handle<FixedDoubleArray> double_array = + Handle<FixedDoubleArray>::cast(array); + for (int i = 0; i < size; ++i) { + double_array->set_the_hole(i); + } + } + return array; } @@ -104,113 +131,31 @@ Handle<ConstantPoolArray> Factory::NewConstantPoolArray( } -Handle<NameDictionary> Factory::NewNameDictionary(int at_least_space_for) { - ASSERT(0 <= at_least_space_for); - CALL_HEAP_FUNCTION(isolate(), - NameDictionary::Allocate(isolate()->heap(), - at_least_space_for), - NameDictionary); -} - - -Handle<SeededNumberDictionary> Factory::NewSeededNumberDictionary( - int at_least_space_for) { - ASSERT(0 <= at_least_space_for); - CALL_HEAP_FUNCTION(isolate(), - SeededNumberDictionary::Allocate(isolate()->heap(), - at_least_space_for), - SeededNumberDictionary); +Handle<OrderedHashSet> Factory::NewOrderedHashSet() { + return 
OrderedHashSet::Allocate(isolate(), 4); } -Handle<UnseededNumberDictionary> Factory::NewUnseededNumberDictionary( - int at_least_space_for) { - ASSERT(0 <= at_least_space_for); - CALL_HEAP_FUNCTION(isolate(), - UnseededNumberDictionary::Allocate(isolate()->heap(), - at_least_space_for), - UnseededNumberDictionary); -} - - -Handle<ObjectHashSet> Factory::NewObjectHashSet(int at_least_space_for) { - ASSERT(0 <= at_least_space_for); - CALL_HEAP_FUNCTION(isolate(), - ObjectHashSet::Allocate(isolate()->heap(), - at_least_space_for), - ObjectHashSet); -} - - -Handle<ObjectHashTable> Factory::NewObjectHashTable( - int at_least_space_for, - MinimumCapacity capacity_option) { - ASSERT(0 <= at_least_space_for); - CALL_HEAP_FUNCTION(isolate(), - ObjectHashTable::Allocate(isolate()->heap(), - at_least_space_for, - capacity_option), - ObjectHashTable); -} - - -Handle<WeakHashTable> Factory::NewWeakHashTable(int at_least_space_for) { - ASSERT(0 <= at_least_space_for); - CALL_HEAP_FUNCTION( - isolate(), - WeakHashTable::Allocate(isolate()->heap(), - at_least_space_for, - USE_DEFAULT_MINIMUM_CAPACITY, - TENURED), - WeakHashTable); -} - - -Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors, - int slack) { - ASSERT(0 <= number_of_descriptors); - CALL_HEAP_FUNCTION(isolate(), - DescriptorArray::Allocate( - isolate(), number_of_descriptors, slack), - DescriptorArray); -} - - -Handle<DeoptimizationInputData> Factory::NewDeoptimizationInputData( - int deopt_entry_count, - PretenureFlag pretenure) { - ASSERT(deopt_entry_count > 0); - CALL_HEAP_FUNCTION(isolate(), - DeoptimizationInputData::Allocate(isolate(), - deopt_entry_count, - pretenure), - DeoptimizationInputData); -} - - -Handle<DeoptimizationOutputData> Factory::NewDeoptimizationOutputData( - int deopt_entry_count, - PretenureFlag pretenure) { - ASSERT(deopt_entry_count > 0); - CALL_HEAP_FUNCTION(isolate(), - DeoptimizationOutputData::Allocate(isolate(), - deopt_entry_count, - pretenure), - DeoptimizationOutputData); +Handle<OrderedHashMap> Factory::NewOrderedHashMap() { + return OrderedHashMap::Allocate(isolate(), 4); } Handle<AccessorPair> Factory::NewAccessorPair() { - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->AllocateAccessorPair(), - AccessorPair); + Handle<AccessorPair> accessors = + Handle<AccessorPair>::cast(NewStruct(ACCESSOR_PAIR_TYPE)); + accessors->set_getter(*the_hole_value(), SKIP_WRITE_BARRIER); + accessors->set_setter(*the_hole_value(), SKIP_WRITE_BARRIER); + accessors->set_access_flags(Smi::FromInt(0), SKIP_WRITE_BARRIER); + return accessors; } Handle<TypeFeedbackInfo> Factory::NewTypeFeedbackInfo() { - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->AllocateTypeFeedbackInfo(), - TypeFeedbackInfo); + Handle<TypeFeedbackInfo> info = + Handle<TypeFeedbackInfo>::cast(NewStruct(TYPE_FEEDBACK_INFO_TYPE)); + info->initialize_storage(); + return info; } @@ -223,9 +168,8 @@ Handle<String> Factory::InternalizeUtf8String(Vector<const char> string) { // Internalized strings are created in the old generation (data space). 
Handle<String> Factory::InternalizeString(Handle<String> string) { - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->InternalizeString(*string), - String); + if (string->IsInternalizedString()) return string; + return StringTable::LookupString(isolate(), string); } @@ -250,9 +194,7 @@ Handle<String> Factory::InternalizeTwoByteString(Vector<const uc16> string) { template<class StringTableKey> Handle<String> Factory::InternalizeStringWithKey(StringTableKey* key) { - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->InternalizeStringWithKey(key), - String); + return StringTable::LookupKey(isolate(), key); } @@ -262,25 +204,50 @@ template Handle<String> Factory::InternalizeStringWithKey< SubStringKey<uint16_t> > (SubStringKey<uint16_t>* key); -Handle<String> Factory::NewStringFromOneByte(Vector<const uint8_t> string, - PretenureFlag pretenure) { - CALL_HEAP_FUNCTION( +MaybeHandle<String> Factory::NewStringFromOneByte(Vector<const uint8_t> string, + PretenureFlag pretenure) { + int length = string.length(); + if (length == 1) { + return LookupSingleCharacterStringFromCode(string[0]); + } + Handle<SeqOneByteString> result; + ASSIGN_RETURN_ON_EXCEPTION( isolate(), - isolate()->heap()->AllocateStringFromOneByte(string, pretenure), + result, + NewRawOneByteString(string.length(), pretenure), String); + + DisallowHeapAllocation no_gc; + // Copy the characters into the new object. + CopyChars(SeqOneByteString::cast(*result)->GetChars(), + string.start(), + length); + return result; } -Handle<String> Factory::NewStringFromUtf8(Vector<const char> string, - PretenureFlag pretenure) { +MaybeHandle<String> Factory::NewStringFromUtf8(Vector<const char> string, + PretenureFlag pretenure) { + // Check for ASCII first since this is the common case. + const char* start = string.start(); + int length = string.length(); + int non_ascii_start = String::NonAsciiStart(start, length); + if (non_ascii_start >= length) { + // If the string is ASCII, we do not need to convert the characters + // since UTF8 is backwards compatible with ASCII. + return NewStringFromOneByte(Vector<const uint8_t>::cast(string), pretenure); + } + // Non-ASCII and we need to decode. 
CALL_HEAP_FUNCTION( isolate(), - isolate()->heap()->AllocateStringFromUtf8(string, pretenure), + isolate()->heap()->AllocateStringFromUtf8Slow(string, + non_ascii_start, + pretenure), String); } -Handle<String> Factory::NewStringFromTwoByte(Vector<const uc16> string, - PretenureFlag pretenure) { +MaybeHandle<String> Factory::NewStringFromTwoByte(Vector<const uc16> string, + PretenureFlag pretenure) { CALL_HEAP_FUNCTION( isolate(), isolate()->heap()->AllocateStringFromTwoByte(string, pretenure), @@ -288,8 +255,74 @@ Handle<String> Factory::NewStringFromTwoByte(Vector<const uc16> string, } -Handle<SeqOneByteString> Factory::NewRawOneByteString(int length, - PretenureFlag pretenure) { +Handle<String> Factory::NewInternalizedStringFromUtf8(Vector<const char> str, + int chars, + uint32_t hash_field) { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->AllocateInternalizedStringFromUtf8( + str, chars, hash_field), + String); +} + + +MUST_USE_RESULT Handle<String> Factory::NewOneByteInternalizedString( + Vector<const uint8_t> str, + uint32_t hash_field) { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->AllocateOneByteInternalizedString(str, hash_field), + String); +} + + +MUST_USE_RESULT Handle<String> Factory::NewTwoByteInternalizedString( + Vector<const uc16> str, + uint32_t hash_field) { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->AllocateTwoByteInternalizedString(str, hash_field), + String); +} + + +Handle<String> Factory::NewInternalizedStringImpl( + Handle<String> string, int chars, uint32_t hash_field) { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->AllocateInternalizedStringImpl( + *string, chars, hash_field), + String); +} + + +MaybeHandle<Map> Factory::InternalizedStringMapForString( + Handle<String> string) { + // If the string is in new space it cannot be used as internalized. + if (isolate()->heap()->InNewSpace(*string)) return MaybeHandle<Map>(); + + // Find the corresponding internalized string map for strings. + switch (string->map()->instance_type()) { + case STRING_TYPE: return internalized_string_map(); + case ASCII_STRING_TYPE: return ascii_internalized_string_map(); + case EXTERNAL_STRING_TYPE: return external_internalized_string_map(); + case EXTERNAL_ASCII_STRING_TYPE: + return external_ascii_internalized_string_map(); + case EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE: + return external_internalized_string_with_one_byte_data_map(); + case SHORT_EXTERNAL_STRING_TYPE: + return short_external_internalized_string_map(); + case SHORT_EXTERNAL_ASCII_STRING_TYPE: + return short_external_ascii_internalized_string_map(); + case SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE: + return short_external_internalized_string_with_one_byte_data_map(); + default: return MaybeHandle<Map>(); // No match found. 
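The Factory::NewStringFromUtf8 hunk above now checks for an all-ASCII prefix first and only falls back to the slow UTF-8 decode (AllocateStringFromUtf8Slow) when a non-ASCII byte is found. A minimal self-contained sketch of that fast-path test; the NonAsciiStart helper below is written for this note and only mirrors the idea behind the String::NonAsciiStart call shown in the diff:

#include <cassert>
#include <cstddef>
#include <cstdint>

// Returns the index of the first byte that is not plain ASCII, or `length`
// if every byte is ASCII, so the data can be used as one-byte chars as-is.
static size_t NonAsciiStart(const char* data, size_t length) {
  for (size_t i = 0; i < length; ++i) {
    if (static_cast<uint8_t>(data[i]) >= 0x80) return i;
  }
  return length;
}

int main() {
  const char ascii[] = "factory";
  const char mixed[] = "caf\xC3\xA9";  // "cafe" with an accented e in UTF-8
  assert(NonAsciiStart(ascii, sizeof(ascii) - 1) == sizeof(ascii) - 1);  // fast path
  assert(NonAsciiStart(mixed, sizeof(mixed) - 1) == 3);  // decode needed from byte 3
  return 0;
}

For pure-ASCII input the bytes can be copied straight into a one-byte string, which is why the common case above avoids the slow allocation path entirely.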
+ } +} + + +MaybeHandle<SeqOneByteString> Factory::NewRawOneByteString( + int length, PretenureFlag pretenure) { CALL_HEAP_FUNCTION( isolate(), isolate()->heap()->AllocateRawOneByteString(length, pretenure), @@ -297,8 +330,8 @@ Handle<SeqOneByteString> Factory::NewRawOneByteString(int length, } -Handle<SeqTwoByteString> Factory::NewRawTwoByteString(int length, - PretenureFlag pretenure) { +MaybeHandle<SeqTwoByteString> Factory::NewRawTwoByteString( + int length, PretenureFlag pretenure) { CALL_HEAP_FUNCTION( isolate(), isolate()->heap()->AllocateRawTwoByteString(length, pretenure), @@ -306,6 +339,30 @@ Handle<SeqTwoByteString> Factory::NewRawTwoByteString(int length, } +Handle<String> Factory::LookupSingleCharacterStringFromCode(uint32_t code) { + if (code <= String::kMaxOneByteCharCodeU) { + { + DisallowHeapAllocation no_allocation; + Object* value = single_character_string_cache()->get(code); + if (value != *undefined_value()) { + return handle(String::cast(value), isolate()); + } + } + uint8_t buffer[1]; + buffer[0] = static_cast<uint8_t>(code); + Handle<String> result = + InternalizeOneByteString(Vector<const uint8_t>(buffer, 1)); + single_character_string_cache()->set(code, *result); + return result; + } + ASSERT(code <= String::kMaxUtf16CodeUnitU); + + Handle<SeqTwoByteString> result = NewRawTwoByteString(1).ToHandleChecked(); + result->SeqTwoByteStringSet(0, static_cast<uint16_t>(code)); + return result; +} + + // Returns true for a character in a range. Both limits are inclusive. static inline bool Between(uint32_t character, uint32_t from, uint32_t to) { // This makes uses of the the unsigned wraparound. @@ -319,10 +376,10 @@ static inline Handle<String> MakeOrFindTwoCharacterString(Isolate* isolate, // Numeric strings have a different hash algorithm not known by // LookupTwoCharsStringIfExists, so we skip this step for such strings. if (!Between(c1, '0', '9') || !Between(c2, '0', '9')) { - String* result; - StringTable* table = isolate->heap()->string_table(); - if (table->LookupTwoCharsStringIfExists(c1, c2, &result)) { - return handle(result); + Handle<String> result; + if (StringTable::LookupTwoCharsStringIfExists(isolate, c1, c2). + ToHandle(&result)) { + return result; } } @@ -331,13 +388,15 @@ static inline Handle<String> MakeOrFindTwoCharacterString(Isolate* isolate, if (static_cast<unsigned>(c1 | c2) <= String::kMaxOneByteCharCodeU) { // We can do this. ASSERT(IsPowerOf2(String::kMaxOneByteCharCodeU + 1)); // because of this. - Handle<SeqOneByteString> str = isolate->factory()->NewRawOneByteString(2); + Handle<SeqOneByteString> str = + isolate->factory()->NewRawOneByteString(2).ToHandleChecked(); uint8_t* dest = str->GetChars(); dest[0] = static_cast<uint8_t>(c1); dest[1] = static_cast<uint8_t>(c2); return str; } else { - Handle<SeqTwoByteString> str = isolate->factory()->NewRawTwoByteString(2); + Handle<SeqTwoByteString> str = + isolate->factory()->NewRawTwoByteString(2).ToHandleChecked(); uc16* dest = str->GetChars(); dest[0] = c1; dest[1] = c2; @@ -358,17 +417,8 @@ Handle<String> ConcatStringContent(Handle<StringType> result, } -Handle<ConsString> Factory::NewRawConsString(String::Encoding encoding) { - Handle<Map> map = (encoding == String::ONE_BYTE_ENCODING) - ? 
cons_ascii_string_map() : cons_string_map(); - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->Allocate(*map, NEW_SPACE), - ConsString); -} - - -Handle<String> Factory::NewConsString(Handle<String> left, - Handle<String> right) { +MaybeHandle<String> Factory::NewConsString(Handle<String> left, + Handle<String> right) { int left_length = left->length(); if (left_length == 0) return right; int right_length = right->length(); @@ -385,8 +435,7 @@ Handle<String> Factory::NewConsString(Handle<String> left, // Make sure that an out of memory exception is thrown if the length // of the new cons string is too large. if (length > String::kMaxLength || length < 0) { - isolate()->ThrowInvalidStringLength(); - return Handle<String>::null(); + return isolate()->Throw<String>(NewInvalidStringLengthError()); } bool left_is_one_byte = left->IsOneByteRepresentation(); @@ -413,7 +462,8 @@ Handle<String> Factory::NewConsString(Handle<String> left, STATIC_ASSERT(ConsString::kMinLength <= String::kMaxLength); if (is_one_byte) { - Handle<SeqOneByteString> result = NewRawOneByteString(length); + Handle<SeqOneByteString> result = + NewRawOneByteString(length).ToHandleChecked(); DisallowHeapAllocation no_gc; uint8_t* dest = result->GetChars(); // Copy left part. @@ -430,14 +480,15 @@ Handle<String> Factory::NewConsString(Handle<String> left, } return (is_one_byte_data_in_two_byte_string) - ? ConcatStringContent<uint8_t>(NewRawOneByteString(length), left, right) - : ConcatStringContent<uc16>(NewRawTwoByteString(length), left, right); + ? ConcatStringContent<uint8_t>( + NewRawOneByteString(length).ToHandleChecked(), left, right) + : ConcatStringContent<uc16>( + NewRawTwoByteString(length).ToHandleChecked(), left, right); } - Handle<ConsString> result = NewRawConsString( - (is_one_byte || is_one_byte_data_in_two_byte_string) - ? String::ONE_BYTE_ENCODING - : String::TWO_BYTE_ENCODING); + Handle<Map> map = (is_one_byte || is_one_byte_data_in_two_byte_string) + ? cons_ascii_string_map() : cons_string_map(); + Handle<ConsString> result = New<ConsString>(map, NEW_SPACE); DisallowHeapAllocation no_gc; WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); @@ -455,23 +506,14 @@ Handle<String> Factory::NewFlatConcatString(Handle<String> first, int total_length = first->length() + second->length(); if (first->IsOneByteRepresentation() && second->IsOneByteRepresentation()) { return ConcatStringContent<uint8_t>( - NewRawOneByteString(total_length), first, second); + NewRawOneByteString(total_length).ToHandleChecked(), first, second); } else { return ConcatStringContent<uc16>( - NewRawTwoByteString(total_length), first, second); + NewRawTwoByteString(total_length).ToHandleChecked(), first, second); } } -Handle<SlicedString> Factory::NewRawSlicedString(String::Encoding encoding) { - Handle<Map> map = (encoding == String::ONE_BYTE_ENCODING) - ? 
sliced_ascii_string_map() : sliced_string_map(); - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->Allocate(*map, NEW_SPACE), - SlicedString); -} - - Handle<String> Factory::NewProperSubString(Handle<String> str, int begin, int end) { @@ -480,10 +522,12 @@ Handle<String> Factory::NewProperSubString(Handle<String> str, #endif ASSERT(begin > 0 || end < str->length()); + str = String::Flatten(str); + int length = end - begin; if (length <= 0) return empty_string(); if (length == 1) { - return LookupSingleCharacterStringFromCode(isolate(), str->Get(begin)); + return LookupSingleCharacterStringFromCode(str->Get(begin)); } if (length == 2) { // Optimization for 2-byte strings often used as keys in a decompression @@ -496,15 +540,15 @@ Handle<String> Factory::NewProperSubString(Handle<String> str, if (!FLAG_string_slices || length < SlicedString::kMinLength) { if (str->IsOneByteRepresentation()) { - Handle<SeqOneByteString> result = NewRawOneByteString(length); - ASSERT(!result.is_null()); + Handle<SeqOneByteString> result = + NewRawOneByteString(length).ToHandleChecked(); uint8_t* dest = result->GetChars(); DisallowHeapAllocation no_gc; String::WriteToFlat(*str, dest, begin, end); return result; } else { - Handle<SeqTwoByteString> result = NewRawTwoByteString(length); - ASSERT(!result.is_null()); + Handle<SeqTwoByteString> result = + NewRawTwoByteString(length).ToHandleChecked(); uc16* dest = result->GetChars(); DisallowHeapAllocation no_gc; String::WriteToFlat(*str, dest, begin, end); @@ -514,34 +558,16 @@ Handle<String> Factory::NewProperSubString(Handle<String> str, int offset = begin; - while (str->IsConsString()) { - Handle<ConsString> cons = Handle<ConsString>::cast(str); - int split = cons->first()->length(); - if (split <= offset) { - // Slice is fully contained in the second part. - str = Handle<String>(cons->second(), isolate()); - offset -= split; // Adjust for offset. - continue; - } else if (offset + length <= split) { - // Slice is fully contained in the first part. - str = Handle<String>(cons->first(), isolate()); - continue; - } - break; - } - if (str->IsSlicedString()) { Handle<SlicedString> slice = Handle<SlicedString>::cast(str); str = Handle<String>(slice->parent(), isolate()); offset += slice->offset(); - } else { - str = FlattenGetString(str); } ASSERT(str->IsSeqString() || str->IsExternalString()); - Handle<SlicedString> slice = NewRawSlicedString( - str->IsOneByteRepresentation() ? String::ONE_BYTE_ENCODING - : String::TWO_BYTE_ENCODING); + Handle<Map> map = str->IsOneByteRepresentation() ? 
sliced_ascii_string_map() + : sliced_string_map(); + Handle<SlicedString> slice = New<SlicedString>(map, NEW_SPACE); slice->set_hash_field(String::kEmptyHashField); slice->set_length(length); @@ -551,21 +577,47 @@ Handle<String> Factory::NewProperSubString(Handle<String> str, } -Handle<String> Factory::NewExternalStringFromAscii( +MaybeHandle<String> Factory::NewExternalStringFromAscii( const ExternalAsciiString::Resource* resource) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateExternalStringFromAscii(resource), - String); + size_t length = resource->length(); + if (length > static_cast<size_t>(String::kMaxLength)) { + isolate()->ThrowInvalidStringLength(); + return MaybeHandle<String>(); + } + + Handle<Map> map = external_ascii_string_map(); + Handle<ExternalAsciiString> external_string = + New<ExternalAsciiString>(map, NEW_SPACE); + external_string->set_length(static_cast<int>(length)); + external_string->set_hash_field(String::kEmptyHashField); + external_string->set_resource(resource); + + return external_string; } -Handle<String> Factory::NewExternalStringFromTwoByte( +MaybeHandle<String> Factory::NewExternalStringFromTwoByte( const ExternalTwoByteString::Resource* resource) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateExternalStringFromTwoByte(resource), - String); + size_t length = resource->length(); + if (length > static_cast<size_t>(String::kMaxLength)) { + isolate()->ThrowInvalidStringLength(); + return MaybeHandle<String>(); + } + + // For small strings we check whether the resource contains only + // one byte characters. If yes, we use a different string map. + static const size_t kOneByteCheckLengthLimit = 32; + bool is_one_byte = length <= kOneByteCheckLengthLimit && + String::IsOneByte(resource->data(), static_cast<int>(length)); + Handle<Map> map = is_one_byte ? 
+ external_string_with_one_byte_data_map() : external_string_map(); + Handle<ExternalTwoByteString> external_string = + New<ExternalTwoByteString>(map, NEW_SPACE); + external_string->set_length(static_cast<int>(length)); + external_string->set_hash_field(String::kEmptyHashField); + external_string->set_resource(resource); + + return external_string; } @@ -578,44 +630,59 @@ Handle<Symbol> Factory::NewSymbol() { Handle<Symbol> Factory::NewPrivateSymbol() { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocatePrivateSymbol(), - Symbol); + Handle<Symbol> symbol = NewSymbol(); + symbol->set_is_private(true); + return symbol; } Handle<Context> Factory::NewNativeContext() { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateNativeContext(), - Context); + Handle<FixedArray> array = NewFixedArray(Context::NATIVE_CONTEXT_SLOTS); + array->set_map_no_write_barrier(*native_context_map()); + Handle<Context> context = Handle<Context>::cast(array); + context->set_js_array_maps(*undefined_value()); + ASSERT(context->IsNativeContext()); + return context; } Handle<Context> Factory::NewGlobalContext(Handle<JSFunction> function, Handle<ScopeInfo> scope_info) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateGlobalContext(*function, *scope_info), - Context); + Handle<FixedArray> array = + NewFixedArray(scope_info->ContextLength(), TENURED); + array->set_map_no_write_barrier(*global_context_map()); + Handle<Context> context = Handle<Context>::cast(array); + context->set_closure(*function); + context->set_previous(function->context()); + context->set_extension(*scope_info); + context->set_global_object(function->context()->global_object()); + ASSERT(context->IsGlobalContext()); + return context; } Handle<Context> Factory::NewModuleContext(Handle<ScopeInfo> scope_info) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateModuleContext(*scope_info), - Context); + Handle<FixedArray> array = + NewFixedArray(scope_info->ContextLength(), TENURED); + array->set_map_no_write_barrier(*module_context_map()); + // Instance link will be set later. 
+ Handle<Context> context = Handle<Context>::cast(array); + context->set_extension(Smi::FromInt(0)); + return context; } Handle<Context> Factory::NewFunctionContext(int length, Handle<JSFunction> function) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateFunctionContext(length, *function), - Context); + ASSERT(length >= Context::MIN_CONTEXT_SLOTS); + Handle<FixedArray> array = NewFixedArray(length); + array->set_map_no_write_barrier(*function_context_map()); + Handle<Context> context = Handle<Context>::cast(array); + context->set_closure(*function); + context->set_previous(function->context()); + context->set_extension(Smi::FromInt(0)); + context->set_global_object(function->context()->global_object()); + return context; } @@ -623,35 +690,45 @@ Handle<Context> Factory::NewCatchContext(Handle<JSFunction> function, Handle<Context> previous, Handle<String> name, Handle<Object> thrown_object) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateCatchContext(*function, - *previous, - *name, - *thrown_object), - Context); + STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX); + Handle<FixedArray> array = NewFixedArray(Context::MIN_CONTEXT_SLOTS + 1); + array->set_map_no_write_barrier(*catch_context_map()); + Handle<Context> context = Handle<Context>::cast(array); + context->set_closure(*function); + context->set_previous(*previous); + context->set_extension(*name); + context->set_global_object(previous->global_object()); + context->set(Context::THROWN_OBJECT_INDEX, *thrown_object); + return context; } Handle<Context> Factory::NewWithContext(Handle<JSFunction> function, Handle<Context> previous, - Handle<JSObject> extension) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateWithContext(*function, *previous, *extension), - Context); + Handle<JSReceiver> extension) { + Handle<FixedArray> array = NewFixedArray(Context::MIN_CONTEXT_SLOTS); + array->set_map_no_write_barrier(*with_context_map()); + Handle<Context> context = Handle<Context>::cast(array); + context->set_closure(*function); + context->set_previous(*previous); + context->set_extension(*extension); + context->set_global_object(previous->global_object()); + return context; } Handle<Context> Factory::NewBlockContext(Handle<JSFunction> function, Handle<Context> previous, Handle<ScopeInfo> scope_info) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateBlockContext(*function, - *previous, - *scope_info), - Context); + Handle<FixedArray> array = + NewFixedArrayWithHoles(scope_info->ContextLength()); + array->set_map_no_write_barrier(*block_context_map()); + Handle<Context> context = Handle<Context>::cast(array); + context->set_closure(*function); + context->set_previous(*previous); + context->set_extension(*scope_info); + context->set_global_object(previous->global_object()); + return context; } @@ -663,6 +740,15 @@ Handle<Struct> Factory::NewStruct(InstanceType type) { } +Handle<CodeCache> Factory::NewCodeCache() { + Handle<CodeCache> code_cache = + Handle<CodeCache>::cast(NewStruct(CODE_CACHE_TYPE)); + code_cache->set_default_cache(*empty_fixed_array(), SKIP_WRITE_BARRIER); + code_cache->set_normal_type_cache(*undefined_value(), SKIP_WRITE_BARRIER); + return code_cache; +} + + Handle<AliasedArgumentsEntry> Factory::NewAliasedArgumentsEntry( int aliased_context_slot) { Handle<AliasedArgumentsEntry> entry = Handle<AliasedArgumentsEntry>::cast( @@ -799,10 +885,14 @@ Handle<PropertyCell> Factory::NewPropertyCell(Handle<Object> value) { Handle<AllocationSite> 
Factory::NewAllocationSite() { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateAllocationSite(), - AllocationSite); + Handle<Map> map = allocation_site_map(); + Handle<AllocationSite> site = New<AllocationSite>(map, OLD_POINTER_SPACE); + site->Initialize(); + + // Link the site + site->set_weak_next(isolate()->heap()->allocation_sites_list()); + isolate()->heap()->set_allocation_sites_list(*site); + return site; } @@ -816,75 +906,36 @@ Handle<Map> Factory::NewMap(InstanceType type, } -Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) { - // Make sure to use globals from the function's context, since the function - // can be from a different context. - Handle<Context> native_context(function->context()->native_context()); - Handle<Map> new_map; - if (function->shared()->is_generator()) { - // Generator prototypes can share maps since they don't have "constructor" - // properties. - new_map = handle(native_context->generator_object_prototype_map()); - } else { - // Each function prototype gets a fresh map to avoid unwanted sharing of - // maps between prototypes of different constructors. - Handle<JSFunction> object_function(native_context->object_function()); - ASSERT(object_function->has_initial_map()); - new_map = Map::Copy(handle(object_function->initial_map())); - } - - Handle<JSObject> prototype = NewJSObjectFromMap(new_map); - - if (!function->shared()->is_generator()) { - JSObject::SetLocalPropertyIgnoreAttributes(prototype, - constructor_string(), - function, - DONT_ENUM); - } - - return prototype; -} - - -Handle<Map> Factory::CopyWithPreallocatedFieldDescriptors(Handle<Map> src) { - CALL_HEAP_FUNCTION( - isolate(), src->CopyWithPreallocatedFieldDescriptors(), Map); +Handle<JSObject> Factory::CopyJSObject(Handle<JSObject> object) { + CALL_HEAP_FUNCTION(isolate(), + isolate()->heap()->CopyJSObject(*object, NULL), + JSObject); } -Handle<Map> Factory::CopyMap(Handle<Map> src, - int extra_inobject_properties) { - Handle<Map> copy = CopyWithPreallocatedFieldDescriptors(src); - // Check that we do not overflow the instance size when adding the - // extra inobject properties. - int instance_size_delta = extra_inobject_properties * kPointerSize; - int max_instance_size_delta = - JSObject::kMaxInstanceSize - copy->instance_size(); - int max_extra_properties = max_instance_size_delta >> kPointerSizeLog2; - if (extra_inobject_properties > max_extra_properties) { - // If the instance size overflows, we allocate as many properties - // as we can as inobject properties. - instance_size_delta = max_instance_size_delta; - extra_inobject_properties = max_extra_properties; - } - // Adjust the map with the extra inobject properties. - int inobject_properties = - copy->inobject_properties() + extra_inobject_properties; - copy->set_inobject_properties(inobject_properties); - copy->set_unused_property_fields(inobject_properties); - copy->set_instance_size(copy->instance_size() + instance_size_delta); - copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy)); - return copy; +Handle<JSObject> Factory::CopyJSObjectWithAllocationSite( + Handle<JSObject> object, + Handle<AllocationSite> site) { + CALL_HEAP_FUNCTION(isolate(), + isolate()->heap()->CopyJSObject( + *object, + site.is_null() ? 
NULL : *site), + JSObject); } -Handle<Map> Factory::CopyMap(Handle<Map> src) { - CALL_HEAP_FUNCTION(isolate(), src->Copy(), Map); +Handle<FixedArray> Factory::CopyFixedArrayWithMap(Handle<FixedArray> array, + Handle<Map> map) { + CALL_HEAP_FUNCTION(isolate(), + isolate()->heap()->CopyFixedArrayWithMap(*array, *map), + FixedArray); } Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) { - CALL_HEAP_FUNCTION(isolate(), array->Copy(), FixedArray); + CALL_HEAP_FUNCTION(isolate(), + isolate()->heap()->CopyFixedArray(*array), + FixedArray); } @@ -897,125 +948,54 @@ Handle<FixedArray> Factory::CopyAndTenureFixedCOWArray( } -Handle<FixedArray> Factory::CopySizeFixedArray(Handle<FixedArray> array, - int new_length, - PretenureFlag pretenure) { - CALL_HEAP_FUNCTION(isolate(), - array->CopySize(new_length, pretenure), - FixedArray); -} - - Handle<FixedDoubleArray> Factory::CopyFixedDoubleArray( Handle<FixedDoubleArray> array) { - CALL_HEAP_FUNCTION(isolate(), array->Copy(), FixedDoubleArray); + CALL_HEAP_FUNCTION(isolate(), + isolate()->heap()->CopyFixedDoubleArray(*array), + FixedDoubleArray); } Handle<ConstantPoolArray> Factory::CopyConstantPoolArray( Handle<ConstantPoolArray> array) { - CALL_HEAP_FUNCTION(isolate(), array->Copy(), ConstantPoolArray); -} - - -Handle<JSFunction> Factory::BaseNewFunctionFromSharedFunctionInfo( - Handle<SharedFunctionInfo> function_info, - Handle<Map> function_map, - PretenureFlag pretenure) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateFunction(*function_map, - *function_info, - isolate()->heap()->the_hole_value(), - pretenure), - JSFunction); -} - - -static Handle<Map> MapForNewFunction(Isolate *isolate, - Handle<SharedFunctionInfo> function_info) { - Context *context = isolate->context()->native_context(); - int map_index = Context::FunctionMapIndex(function_info->strict_mode(), - function_info->is_generator()); - return Handle<Map>(Map::cast(context->get(map_index))); -} - - -Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo( - Handle<SharedFunctionInfo> function_info, - Handle<Context> context, - PretenureFlag pretenure) { - Handle<JSFunction> result = BaseNewFunctionFromSharedFunctionInfo( - function_info, - MapForNewFunction(isolate(), function_info), - pretenure); - - if (function_info->ic_age() != isolate()->heap()->global_ic_age()) { - function_info->ResetForNewContext(isolate()->heap()->global_ic_age()); - } - - result->set_context(*context); - - int index = function_info->SearchOptimizedCodeMap(context->native_context(), - BailoutId::None()); - if (!function_info->bound() && index < 0) { - int number_of_literals = function_info->num_literals(); - Handle<FixedArray> literals = NewFixedArray(number_of_literals, pretenure); - if (number_of_literals > 0) { - // Store the native context in the literals array prefix. This - // context will be used when creating object, regexp and array - // literals in this function. - literals->set(JSFunction::kLiteralNativeContextIndex, - context->native_context()); - } - result->set_literals(*literals); - } - - if (index > 0) { - // Caching of optimized code enabled and optimized code found. 
- FixedArray* literals = - function_info->GetLiteralsFromOptimizedCodeMap(index); - if (literals != NULL) result->set_literals(literals); - Code* code = function_info->GetCodeFromOptimizedCodeMap(index); - ASSERT(!code->marked_for_deoptimization()); - result->ReplaceCode(code); - return result; - } - - if (isolate()->use_crankshaft() && - FLAG_always_opt && - result->is_compiled() && - !function_info->is_toplevel() && - function_info->allows_lazy_compilation() && - !function_info->optimization_disabled() && - !isolate()->DebuggerHasBreakPoints()) { - result->MarkForOptimization(); - } - return result; + CALL_HEAP_FUNCTION(isolate(), + isolate()->heap()->CopyConstantPoolArray(*array), + ConstantPoolArray); } Handle<Object> Factory::NewNumber(double value, PretenureFlag pretenure) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->NumberFromDouble(value, pretenure), Object); + // We need to distinguish the minus zero value and this cannot be + // done after conversion to int. Doing this by comparing bit + // patterns is faster than using fpclassify() et al. + if (IsMinusZero(value)) return NewHeapNumber(-0.0, pretenure); + + int int_value = FastD2I(value); + if (value == int_value && Smi::IsValid(int_value)) { + return handle(Smi::FromInt(int_value), isolate()); + } + + // Materialize the value in the heap. + return NewHeapNumber(value, pretenure); } Handle<Object> Factory::NewNumberFromInt(int32_t value, PretenureFlag pretenure) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->NumberFromInt32(value, pretenure), Object); + if (Smi::IsValid(value)) return handle(Smi::FromInt(value), isolate()); + // Bypass NumberFromDouble to avoid various redundant checks. + return NewHeapNumber(FastI2D(value), pretenure); } Handle<Object> Factory::NewNumberFromUint(uint32_t value, - PretenureFlag pretenure) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->NumberFromUint32(value, pretenure), Object); + PretenureFlag pretenure) { + int32_t int32v = static_cast<int32_t>(value); + if (int32v >= 0 && Smi::IsValid(int32v)) { + return handle(Smi::FromInt(int32v), isolate()); + } + return NewHeapNumber(FastUI2D(value), pretenure); } @@ -1027,15 +1007,6 @@ Handle<HeapNumber> Factory::NewHeapNumber(double value, } -Handle<JSObject> Factory::NewNeanderObject() { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateJSObjectFromMap( - isolate()->heap()->neander_map()), - JSObject); -} - - Handle<Object> Factory::NewTypeError(const char* message, Vector< Handle<Object> > args) { return NewError("MakeTypeError", message, args); @@ -1131,7 +1102,7 @@ Handle<String> Factory::EmergencyNewError(const char* message, space--; if (space > 0) { Handle<String> arg_str = Handle<String>::cast( - Object::GetElementNoExceptionThrown(isolate(), args, i)); + Object::GetElement(isolate(), args, i).ToHandleChecked()); SmartArrayPointer<char> arg = arg_str->ToCString(); Vector<char> v2(p, static_cast<int>(space)); OS::StrNCpy(v2, arg.get(), space); @@ -1145,8 +1116,7 @@ Handle<String> Factory::EmergencyNewError(const char* message, } else { buffer[kBufferSize - 1] = '\0'; } - Handle<String> error_string = NewStringFromUtf8(CStrVector(buffer), TENURED); - return error_string; + return NewStringFromUtf8(CStrVector(buffer), TENURED).ToHandleChecked(); } @@ -1154,9 +1124,8 @@ Handle<Object> Factory::NewError(const char* maker, const char* message, Handle<JSArray> args) { Handle<String> make_str = InternalizeUtf8String(maker); - Handle<Object> fun_obj( - 
isolate()->js_builtins_object()->GetPropertyNoExceptionThrown(*make_str), - isolate()); + Handle<Object> fun_obj = Object::GetProperty( + isolate()->js_builtins_object(), make_str).ToHandleChecked(); // If the builtins haven't been properly configured yet this error // constructor may not have been defined. Bail out. if (!fun_obj->IsJSFunction()) { @@ -1168,12 +1137,15 @@ Handle<Object> Factory::NewError(const char* maker, // Invoke the JavaScript factory method. If an exception is thrown while // running the factory method, use the exception as the result. - bool caught_exception; - Handle<Object> result = Execution::TryCall(fun, - isolate()->js_builtins_object(), - ARRAY_SIZE(argv), - argv, - &caught_exception); + Handle<Object> result; + Handle<Object> exception; + if (!Execution::TryCall(fun, + isolate()->js_builtins_object(), + ARRAY_SIZE(argv), + argv, + &exception).ToHandle(&result)) { + return exception; + } return result; } @@ -1186,41 +1158,104 @@ Handle<Object> Factory::NewError(Handle<String> message) { Handle<Object> Factory::NewError(const char* constructor, Handle<String> message) { Handle<String> constr = InternalizeUtf8String(constructor); - Handle<JSFunction> fun = Handle<JSFunction>( - JSFunction::cast(isolate()->js_builtins_object()-> - GetPropertyNoExceptionThrown(*constr))); + Handle<JSFunction> fun = Handle<JSFunction>::cast(Object::GetProperty( + isolate()->js_builtins_object(), constr).ToHandleChecked()); Handle<Object> argv[] = { message }; // Invoke the JavaScript factory method. If an exception is thrown while // running the factory method, use the exception as the result. - bool caught_exception; - Handle<Object> result = Execution::TryCall(fun, - isolate()->js_builtins_object(), - ARRAY_SIZE(argv), - argv, - &caught_exception); + Handle<Object> result; + Handle<Object> exception; + if (!Execution::TryCall(fun, + isolate()->js_builtins_object(), + ARRAY_SIZE(argv), + argv, + &exception).ToHandle(&result)) { + return exception; + } + return result; +} + + +void Factory::InitializeFunction(Handle<JSFunction> function, + Handle<SharedFunctionInfo> info, + Handle<Context> context) { + function->initialize_properties(); + function->initialize_elements(); + function->set_shared(*info); + function->set_code(info->code()); + function->set_context(*context); + function->set_prototype_or_initial_map(*the_hole_value()); + function->set_literals_or_bindings(*empty_fixed_array()); + function->set_next_function_link(*undefined_value()); +} + + +Handle<JSFunction> Factory::NewFunction(Handle<Map> map, + Handle<SharedFunctionInfo> info, + Handle<Context> context, + PretenureFlag pretenure) { + AllocationSpace space = pretenure == TENURED ? OLD_POINTER_SPACE : NEW_SPACE; + Handle<JSFunction> result = New<JSFunction>(map, space); + InitializeFunction(result, info, context); return result; } Handle<JSFunction> Factory::NewFunction(Handle<String> name, + Handle<Code> code, + MaybeHandle<Object> maybe_prototype) { + Handle<SharedFunctionInfo> info = NewSharedFunctionInfo(name); + ASSERT(info->strict_mode() == SLOPPY); + info->set_code(*code); + Handle<Context> context(isolate()->context()->native_context()); + Handle<Map> map = maybe_prototype.is_null() + ? 
isolate()->sloppy_function_without_prototype_map() + : isolate()->sloppy_function_map(); + Handle<JSFunction> result = NewFunction(map, info, context); + Handle<Object> prototype; + if (maybe_prototype.ToHandle(&prototype)) { + result->set_prototype_or_initial_map(*prototype); + } + return result; +} + + +Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name, + Handle<Object> prototype) { + Handle<SharedFunctionInfo> info = NewSharedFunctionInfo(name); + ASSERT(info->strict_mode() == SLOPPY); + Handle<Context> context(isolate()->context()->native_context()); + Handle<Map> map = isolate()->sloppy_function_map(); + Handle<JSFunction> result = NewFunction(map, info, context); + result->set_prototype_or_initial_map(*prototype); + return result; +} + + +Handle<JSFunction> Factory::NewFunction(MaybeHandle<Object> maybe_prototype, + Handle<String> name, InstanceType type, int instance_size, Handle<Code> code, bool force_initial_map) { // Allocate the function - Handle<JSFunction> function = NewFunction(name, the_hole_value()); - - // Set up the code pointer in both the shared function info and in - // the function itself. - function->shared()->set_code(*code); - function->set_code(*code); + Handle<JSFunction> function = NewFunction(name, code, maybe_prototype); if (force_initial_map || type != JS_OBJECT_TYPE || instance_size != JSObject::kHeaderSize) { + Handle<Object> prototype = maybe_prototype.ToHandleChecked(); Handle<Map> initial_map = NewMap(type, instance_size); - Handle<JSObject> prototype = NewFunctionPrototype(function); + if (prototype->IsJSObject()) { + JSObject::SetLocalPropertyIgnoreAttributes( + Handle<JSObject>::cast(prototype), + constructor_string(), + function, + DONT_ENUM).Assert(); + } else if (!function->shared()->is_generator()) { + prototype = NewFunctionPrototype(function); + } initial_map->set_prototype(*prototype); function->set_initial_map(*initial_map); initial_map->set_constructor(*function); @@ -1233,6 +1268,16 @@ Handle<JSFunction> Factory::NewFunction(Handle<String> name, } +Handle<JSFunction> Factory::NewFunction(Handle<String> name, + InstanceType type, + int instance_size, + Handle<Code> code, + bool force_initial_map) { + return NewFunction( + the_hole_value(), name, type, instance_size, code, force_initial_map); +} + + Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name, InstanceType type, int instance_size, @@ -1240,12 +1285,7 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name, Handle<Code> code, bool force_initial_map) { // Allocate the function. - Handle<JSFunction> function = NewFunction(name, prototype); - - // Set up the code pointer in both the shared function info and in - // the function itself. 
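The factory methods in the hunks around this point switch from out-parameters and exception flags to MaybeHandle<T> return values: a caller either tests the result with ToHandle(&local), asserts success with ToHandleChecked(), or treats is_null() as "no value", as NewFunction does for its optional prototype. A standalone sketch of that calling convention follows; the MaybeHandle below is a simplified stand-in (a raw pointer wrapper), not V8's real template, and ParsePositive is a made-up example function.

  #include <cassert>
  #include <cstdio>

  // Simplified stand-in for v8::internal::MaybeHandle<T>: a value that may
  // legitimately be empty. The real class wraps a Handle location instead.
  template <typename T>
  class MaybeHandle {
   public:
    MaybeHandle() : location_(nullptr) {}                 // "nothing"
    explicit MaybeHandle(T* value) : location_(value) {}  // holds a value
    bool is_null() const { return location_ == nullptr; }
    // Returns false and leaves *out untouched when empty.
    bool ToHandle(T** out) const {
      if (is_null()) return false;
      *out = location_;
      return true;
    }
    // Asserts that a value is present, like ToHandleChecked().
    T* ToHandleChecked() const {
      assert(!is_null());
      return location_;
    }
   private:
    T* location_;
  };

  // A factory-style function that can fail: it returns an empty MaybeHandle
  // instead of writing to a bool* exception flag.
  MaybeHandle<int> ParsePositive(int raw) {
    static int storage;
    if (raw < 0) return MaybeHandle<int>();
    storage = raw;
    return MaybeHandle<int>(&storage);
  }

  int main() {
    int* value = nullptr;
    if (!ParsePositive(-1).ToHandle(&value)) {
      std::printf("negative input rejected\n");
    }
    std::printf("checked value: %d\n", *ParsePositive(42).ToHandleChecked());
    return 0;
  }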
- function->shared()->set_code(*code); - function->set_code(*code); + Handle<JSFunction> function = NewFunction(name, code, prototype); if (force_initial_map || type != JS_OBJECT_TYPE || @@ -1262,29 +1302,119 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name, } -Handle<JSFunction> Factory::NewFunctionWithoutPrototype(Handle<String> name, - Handle<Code> code) { - Handle<JSFunction> function = NewFunctionWithoutPrototype(name, SLOPPY); - function->shared()->set_code(*code); - function->set_code(*code); - ASSERT(!function->has_initial_map()); - ASSERT(!function->has_prototype()); - return function; +Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) { + // Make sure to use globals from the function's context, since the function + // can be from a different context. + Handle<Context> native_context(function->context()->native_context()); + Handle<Map> new_map; + if (function->shared()->is_generator()) { + // Generator prototypes can share maps since they don't have "constructor" + // properties. + new_map = handle(native_context->generator_object_prototype_map()); + } else { + // Each function prototype gets a fresh map to avoid unwanted sharing of + // maps between prototypes of different constructors. + Handle<JSFunction> object_function(native_context->object_function()); + ASSERT(object_function->has_initial_map()); + new_map = Map::Copy(handle(object_function->initial_map())); + } + + Handle<JSObject> prototype = NewJSObjectFromMap(new_map); + + if (!function->shared()->is_generator()) { + JSObject::SetLocalPropertyIgnoreAttributes(prototype, + constructor_string(), + function, + DONT_ENUM).Assert(); + } + + return prototype; +} + + +Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo( + Handle<SharedFunctionInfo> info, + Handle<Context> context, + PretenureFlag pretenure) { + int map_index = Context::FunctionMapIndex(info->strict_mode(), + info->is_generator()); + Handle<Map> map(Map::cast(context->native_context()->get(map_index))); + Handle<JSFunction> result = NewFunction(map, info, context, pretenure); + + if (info->ic_age() != isolate()->heap()->global_ic_age()) { + info->ResetForNewContext(isolate()->heap()->global_ic_age()); + } + + int index = info->SearchOptimizedCodeMap(context->native_context(), + BailoutId::None()); + if (!info->bound() && index < 0) { + int number_of_literals = info->num_literals(); + Handle<FixedArray> literals = NewFixedArray(number_of_literals, pretenure); + if (number_of_literals > 0) { + // Store the native context in the literals array prefix. This + // context will be used when creating object, regexp and array + // literals in this function. + literals->set(JSFunction::kLiteralNativeContextIndex, + context->native_context()); + } + result->set_literals(*literals); + } + + if (index > 0) { + // Caching of optimized code enabled and optimized code found. 
+ FixedArray* literals = info->GetLiteralsFromOptimizedCodeMap(index); + if (literals != NULL) result->set_literals(literals); + Code* code = info->GetCodeFromOptimizedCodeMap(index); + ASSERT(!code->marked_for_deoptimization()); + result->ReplaceCode(code); + return result; + } + + if (isolate()->use_crankshaft() && + FLAG_always_opt && + result->is_compiled() && + !info->is_toplevel() && + info->allows_lazy_compilation() && + !info->optimization_disabled() && + !isolate()->DebuggerHasBreakPoints()) { + result->MarkForOptimization(); + } + return result; +} + + +Handle<JSObject> Factory::NewIteratorResultObject(Handle<Object> value, + bool done) { + Handle<Map> map(isolate()->native_context()->iterator_result_map()); + Handle<JSObject> result = NewJSObjectFromMap(map, NOT_TENURED, false); + result->InObjectPropertyAtPut( + JSGeneratorObject::kResultValuePropertyIndex, *value); + result->InObjectPropertyAtPut( + JSGeneratorObject::kResultDonePropertyIndex, *ToBoolean(done)); + return result; } Handle<ScopeInfo> Factory::NewScopeInfo(int length) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateScopeInfo(length), - ScopeInfo); + Handle<FixedArray> array = NewFixedArray(length, TENURED); + array->set_map_no_write_barrier(*scope_info_map()); + Handle<ScopeInfo> scope_info = Handle<ScopeInfo>::cast(array); + return scope_info; } Handle<JSObject> Factory::NewExternal(void* value) { + Handle<Foreign> foreign = NewForeign(static_cast<Address>(value)); + Handle<JSObject> external = NewJSObjectFromMap(external_map()); + external->SetInternalField(0, *foreign); + return external; +} + + +Handle<Code> Factory::NewCodeRaw(int object_size, bool immovable) { CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->AllocateExternal(value), - JSObject); + isolate()->heap()->AllocateCode(object_size, immovable), + Code); } @@ -1294,11 +1424,60 @@ Handle<Code> Factory::NewCode(const CodeDesc& desc, bool immovable, bool crankshafted, int prologue_offset) { - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->CreateCode( - desc, flags, self_ref, immovable, crankshafted, - prologue_offset), - Code); + Handle<ByteArray> reloc_info = NewByteArray(desc.reloc_size, TENURED); + Handle<ConstantPoolArray> constant_pool = + desc.origin->NewConstantPool(isolate()); + + // Compute size. + int body_size = RoundUp(desc.instr_size, kObjectAlignment); + int obj_size = Code::SizeFor(body_size); + + Handle<Code> code = NewCodeRaw(obj_size, immovable); + ASSERT(!isolate()->code_range()->exists() || + isolate()->code_range()->contains(code->address())); + + // The code object has not been fully initialized yet. We rely on the + // fact that no allocation will happen from this point on. 
+ DisallowHeapAllocation no_gc; + code->set_gc_metadata(Smi::FromInt(0)); + code->set_ic_age(isolate()->heap()->global_ic_age()); + code->set_instruction_size(desc.instr_size); + code->set_relocation_info(*reloc_info); + code->set_flags(flags); + code->set_raw_kind_specific_flags1(0); + code->set_raw_kind_specific_flags2(0); + code->set_is_crankshafted(crankshafted); + code->set_deoptimization_data(*empty_fixed_array(), SKIP_WRITE_BARRIER); + code->set_raw_type_feedback_info(*undefined_value()); + code->set_next_code_link(*undefined_value()); + code->set_handler_table(*empty_fixed_array(), SKIP_WRITE_BARRIER); + code->set_prologue_offset(prologue_offset); + if (code->kind() == Code::OPTIMIZED_FUNCTION) { + code->set_marked_for_deoptimization(false); + } + + desc.origin->PopulateConstantPool(*constant_pool); + code->set_constant_pool(*constant_pool); + + if (code->kind() == Code::FUNCTION) { + code->set_has_debug_break_slots(isolate()->debugger()->IsDebuggerActive()); + } + + // Allow self references to created code object by patching the handle to + // point to the newly allocated Code object. + if (!self_ref.is_null()) *(self_ref.location()) = *code; + + // Migrate generated code. + // The generated code can contain Object** values (typically from handles) + // that are dereferenced during the copy to point directly to the actual heap + // objects. These pointers can include references to the code object itself, + // through the self_reference parameter. + code->CopyFrom(desc); + +#ifdef VERIFY_HEAP + if (FLAG_verify_heap) code->ObjectVerify(); +#endif + return code; } @@ -1338,28 +1517,14 @@ Handle<JSObject> Factory::NewJSObjectWithMemento( Handle<JSModule> Factory::NewJSModule(Handle<Context> context, Handle<ScopeInfo> scope_info) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateJSModule(*context, *scope_info), JSModule); -} - - -// TODO(mstarzinger): Temporary wrapper until handlified. -static Handle<NameDictionary> NameDictionaryAdd(Handle<NameDictionary> dict, - Handle<Name> name, - Handle<Object> value, - PropertyDetails details) { - CALL_HEAP_FUNCTION(dict->GetIsolate(), - dict->Add(*name, *value, details), - NameDictionary); -} - - -static Handle<GlobalObject> NewGlobalObjectFromMap(Isolate* isolate, - Handle<Map> map) { - CALL_HEAP_FUNCTION(isolate, - isolate->heap()->Allocate(*map, OLD_POINTER_SPACE), - GlobalObject); + // Allocate a fresh map. Modules do not have a prototype. + Handle<Map> map = NewMap(JS_MODULE_TYPE, JSModule::kSize); + // Allocate the object based on the map. + Handle<JSModule> module = + Handle<JSModule>::cast(NewJSObjectFromMap(map, TENURED)); + module->set_context(*context); + module->set_scope_info(*scope_info); + return module; } @@ -1385,7 +1550,8 @@ Handle<GlobalObject> Factory::NewGlobalObject(Handle<JSFunction> constructor) { // Allocate a dictionary object for backing storage. int at_least_space_for = map->NumberOfOwnDescriptors() * 2 + initial_size; - Handle<NameDictionary> dictionary = NewNameDictionary(at_least_space_for); + Handle<NameDictionary> dictionary = + NameDictionary::New(isolate(), at_least_space_for); // The global object might be created from an object template with accessors. // Fill these accessors into the dictionary. 
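NewCode above relies on a scope object (DisallowHeapAllocation no_gc) to document and, in debug builds, enforce that no allocation and therefore no GC can move the half-initialized Code object while its fields are written. A rough standalone sketch of that RAII idea; the global counter and AllocateOnManagedHeap are inventions of this sketch, whereas the real class is a per-isolate assert scope.

  #include <cassert>
  #include <cstddef>
  #include <new>

  // Depth of open "no allocation" scopes. A plain global keeps the sketch
  // small; the real DisallowHeapAllocation tracks this per isolate.
  static int g_no_allocation_depth = 0;

  class DisallowAllocationScope {
   public:
    DisallowAllocationScope() { ++g_no_allocation_depth; }
    ~DisallowAllocationScope() { --g_no_allocation_depth; }
  };

  // Every allocation path checks the counter, so allocating inside the scope
  // trips the assert in a debug build.
  void* AllocateOnManagedHeap(std::size_t size) {
    assert(g_no_allocation_depth == 0 && "allocation inside no-GC scope");
    return ::operator new(size);
  }

  int main() {
    void* raw = AllocateOnManagedHeap(64);  // allowed: no scope is open
    {
      DisallowAllocationScope no_gc;
      // Initialize the memory obtained above here; calling
      // AllocateOnManagedHeap now would assert, mirroring the
      // "no allocation will happen from this point on" comment in NewCode.
    }
    ::operator delete(raw);
    return 0;
  }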
@@ -1397,11 +1563,12 @@ Handle<GlobalObject> Factory::NewGlobalObject(Handle<JSFunction> constructor) { Handle<Name> name(descs->GetKey(i)); Handle<Object> value(descs->GetCallbacksObject(i), isolate()); Handle<PropertyCell> cell = NewPropertyCell(value); - NameDictionaryAdd(dictionary, name, cell, d); + // |dictionary| already contains enough space for all properties. + USE(NameDictionary::Add(dictionary, name, cell, d)); } // Allocate the global object and initialize it with the backing store. - Handle<GlobalObject> global = NewGlobalObjectFromMap(isolate(), map); + Handle<GlobalObject> global = New<GlobalObject>(map, OLD_POINTER_SPACE); isolate()->heap()->InitializeJSObjectFromMap(*global, *dictionary, *map); // Create a new map for the global object. @@ -1435,21 +1602,24 @@ Handle<JSObject> Factory::NewJSObjectFromMap( Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind, + PretenureFlag pretenure) { + Context* native_context = isolate()->context()->native_context(); + JSFunction* array_function = native_context->array_function(); + Map* map = array_function->initial_map(); + Map* transition_map = isolate()->get_initial_js_array_map(elements_kind); + if (transition_map != NULL) map = transition_map; + return Handle<JSArray>::cast(NewJSObjectFromMap(handle(map), pretenure)); +} + + +Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode, PretenureFlag pretenure) { - if (capacity != 0) { - elements_kind = GetHoleyElementsKind(elements_kind); - } - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->AllocateJSArrayAndStorage( - elements_kind, - length, - capacity, - mode, - pretenure), - JSArray); + Handle<JSArray> array = NewJSArray(elements_kind, pretenure); + NewJSArrayStorage(array, length, capacity, mode); + return array; } @@ -1458,25 +1628,48 @@ Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArrayBase> elements, int length, PretenureFlag pretenure) { ASSERT(length <= elements->length()); - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateJSArrayWithElements(*elements, - elements_kind, - length, - pretenure), - JSArray); + Handle<JSArray> array = NewJSArray(elements_kind, pretenure); + + array->set_elements(*elements); + array->set_length(Smi::FromInt(length)); + JSObject::ValidateElements(array); + return array; } void Factory::NewJSArrayStorage(Handle<JSArray> array, - int length, - int capacity, - ArrayStorageAllocationMode mode) { - CALL_HEAP_FUNCTION_VOID(isolate(), - isolate()->heap()->AllocateJSArrayStorage(*array, - length, - capacity, - mode)); + int length, + int capacity, + ArrayStorageAllocationMode mode) { + ASSERT(capacity >= length); + + if (capacity == 0) { + array->set_length(Smi::FromInt(0)); + array->set_elements(*empty_fixed_array()); + return; + } + + Handle<FixedArrayBase> elms; + ElementsKind elements_kind = array->GetElementsKind(); + if (IsFastDoubleElementsKind(elements_kind)) { + if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { + elms = NewFixedDoubleArray(capacity); + } else { + ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); + elms = NewFixedDoubleArrayWithHoles(capacity); + } + } else { + ASSERT(IsFastSmiOrObjectElementsKind(elements_kind)); + if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { + elms = NewUninitializedFixedArray(capacity); + } else { + ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); + elms = NewFixedArrayWithHoles(capacity); + } + } + + array->set_elements(*elms); + array->set_length(Smi::FromInt(length)); } @@ -1543,26 +1736,152 @@ 
Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type) { Handle<JSProxy> Factory::NewJSProxy(Handle<Object> handler, Handle<Object> prototype) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateJSProxy(*handler, *prototype), - JSProxy); + // Allocate map. + // TODO(rossberg): Once we optimize proxies, think about a scheme to share + // maps. Will probably depend on the identity of the handler object, too. + Handle<Map> map = NewMap(JS_PROXY_TYPE, JSProxy::kSize); + map->set_prototype(*prototype); + + // Allocate the proxy object. + Handle<JSProxy> result = New<JSProxy>(map, NEW_SPACE); + result->InitializeBody(map->instance_size(), Smi::FromInt(0)); + result->set_handler(*handler); + result->set_hash(*undefined_value(), SKIP_WRITE_BARRIER); + return result; +} + + +Handle<JSProxy> Factory::NewJSFunctionProxy(Handle<Object> handler, + Handle<Object> call_trap, + Handle<Object> construct_trap, + Handle<Object> prototype) { + // Allocate map. + // TODO(rossberg): Once we optimize proxies, think about a scheme to share + // maps. Will probably depend on the identity of the handler object, too. + Handle<Map> map = NewMap(JS_FUNCTION_PROXY_TYPE, JSFunctionProxy::kSize); + map->set_prototype(*prototype); + + // Allocate the proxy object. + Handle<JSFunctionProxy> result = New<JSFunctionProxy>(map, NEW_SPACE); + result->InitializeBody(map->instance_size(), Smi::FromInt(0)); + result->set_handler(*handler); + result->set_hash(*undefined_value(), SKIP_WRITE_BARRIER); + result->set_call_trap(*call_trap); + result->set_construct_trap(*construct_trap); + return result; +} + + +void Factory::ReinitializeJSReceiver(Handle<JSReceiver> object, + InstanceType type, + int size) { + ASSERT(type >= FIRST_JS_OBJECT_TYPE); + + // Allocate fresh map. + // TODO(rossberg): Once we optimize proxies, cache these maps. + Handle<Map> map = NewMap(type, size); + + // Check that the receiver has at least the size of the fresh object. + int size_difference = object->map()->instance_size() - map->instance_size(); + ASSERT(size_difference >= 0); + + map->set_prototype(object->map()->prototype()); + + // Allocate the backing storage for the properties. + int prop_size = map->InitialPropertiesLength(); + Handle<FixedArray> properties = NewFixedArray(prop_size, TENURED); + + Heap* heap = isolate()->heap(); + MaybeHandle<SharedFunctionInfo> shared; + if (type == JS_FUNCTION_TYPE) { + OneByteStringKey key(STATIC_ASCII_VECTOR("<freezing call trap>"), + heap->HashSeed()); + Handle<String> name = InternalizeStringWithKey(&key); + shared = NewSharedFunctionInfo(name); + } + + // In order to keep heap in consistent state there must be no allocations + // before object re-initialization is finished and filler object is installed. + DisallowHeapAllocation no_allocation; + + // Reset the map for the object. + object->set_map(*map); + Handle<JSObject> jsobj = Handle<JSObject>::cast(object); + + // Reinitialize the object from the constructor map. + heap->InitializeJSObjectFromMap(*jsobj, *properties, *map); + + // Functions require some minimal initialization. + if (type == JS_FUNCTION_TYPE) { + map->set_function_with_prototype(true); + Handle<JSFunction> js_function = Handle<JSFunction>::cast(object); + Handle<Context> context(isolate()->context()->native_context()); + InitializeFunction(js_function, shared.ToHandleChecked(), context); + } + + // Put in filler if the new object is smaller than the old. 
+ if (size_difference > 0) { + heap->CreateFillerObjectAt( + object->address() + map->instance_size(), size_difference); + } +} + + +void Factory::ReinitializeJSGlobalProxy(Handle<JSGlobalProxy> object, + Handle<JSFunction> constructor) { + ASSERT(constructor->has_initial_map()); + Handle<Map> map(constructor->initial_map(), isolate()); + + // The proxy's hash should be retained across reinitialization. + Handle<Object> hash(object->hash(), isolate()); + + // Check that the already allocated object has the same size and type as + // objects allocated using the constructor. + ASSERT(map->instance_size() == object->map()->instance_size()); + ASSERT(map->instance_type() == object->map()->instance_type()); + + // Allocate the backing storage for the properties. + int prop_size = map->InitialPropertiesLength(); + Handle<FixedArray> properties = NewFixedArray(prop_size, TENURED); + + // In order to keep heap in consistent state there must be no allocations + // before object re-initialization is finished. + DisallowHeapAllocation no_allocation; + + // Reset the map for the object. + object->set_map(constructor->initial_map()); + + Heap* heap = isolate()->heap(); + // Reinitialize the object from the constructor map. + heap->InitializeJSObjectFromMap(*object, *properties, *map); + + // Restore the saved hash. + object->set_hash(*hash); } void Factory::BecomeJSObject(Handle<JSReceiver> object) { - CALL_HEAP_FUNCTION_VOID( - isolate(), - isolate()->heap()->ReinitializeJSReceiver( - *object, JS_OBJECT_TYPE, JSObject::kHeaderSize)); + ReinitializeJSReceiver(object, JS_OBJECT_TYPE, JSObject::kHeaderSize); } void Factory::BecomeJSFunction(Handle<JSReceiver> object) { - CALL_HEAP_FUNCTION_VOID( + ReinitializeJSReceiver(object, JS_FUNCTION_TYPE, JSFunction::kSize); +} + + +Handle<FixedArray> Factory::NewTypeFeedbackVector(int slot_count) { + // Ensure we can skip the write barrier + ASSERT_EQ(isolate()->heap()->uninitialized_symbol(), + *TypeFeedbackInfo::UninitializedSentinel(isolate())); + + CALL_HEAP_FUNCTION( isolate(), - isolate()->heap()->ReinitializeJSReceiver( - *object, JS_FUNCTION_TYPE, JSFunction::kSize)); + isolate()->heap()->AllocateFixedArrayWithFiller( + slot_count, + TENURED, + *TypeFeedbackInfo::UninitializedSentinel(isolate())), + FixedArray); } @@ -1571,10 +1890,12 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo( int number_of_literals, bool is_generator, Handle<Code> code, - Handle<ScopeInfo> scope_info) { + Handle<ScopeInfo> scope_info, + Handle<FixedArray> feedback_vector) { Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(name); shared->set_code(*code); shared->set_scope_info(*scope_info); + shared->set_feedback_vector(*feedback_vector); int literals_array_size = number_of_literals; // If the function contains object, regexp or array literals, // allocate extra space for a literals array prefix containing the @@ -1598,113 +1919,137 @@ Handle<JSMessageObject> Factory::NewJSMessageObject( int end_position, Handle<Object> script, Handle<Object> stack_frames) { - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->AllocateJSMessageObject(*type, - *arguments, - start_position, - end_position, - *script, - *stack_frames), - JSMessageObject); + Handle<Map> map = message_object_map(); + Handle<JSMessageObject> message = New<JSMessageObject>(map, NEW_SPACE); + message->set_properties(*empty_fixed_array(), SKIP_WRITE_BARRIER); + message->initialize_elements(); + message->set_elements(*empty_fixed_array(), SKIP_WRITE_BARRIER); + message->set_type(*type); + 
message->set_arguments(*arguments); + message->set_start_position(start_position); + message->set_end_position(end_position); + message->set_script(*script); + message->set_stack_frames(*stack_frames); + return message; } Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(Handle<String> name) { - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->AllocateSharedFunctionInfo(*name), - SharedFunctionInfo); -} - - -Handle<String> Factory::NumberToString(Handle<Object> number) { - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->NumberToString(*number), String); -} - - -Handle<String> Factory::Uint32ToString(uint32_t value) { - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->Uint32ToString(value), String); -} - - -Handle<SeededNumberDictionary> Factory::DictionaryAtNumberPut( - Handle<SeededNumberDictionary> dictionary, - uint32_t key, - Handle<Object> value) { - CALL_HEAP_FUNCTION(isolate(), - dictionary->AtNumberPut(key, *value), - SeededNumberDictionary); -} - - -Handle<UnseededNumberDictionary> Factory::DictionaryAtNumberPut( - Handle<UnseededNumberDictionary> dictionary, - uint32_t key, - Handle<Object> value) { - CALL_HEAP_FUNCTION(isolate(), - dictionary->AtNumberPut(key, *value), - UnseededNumberDictionary); -} - - -Handle<JSFunction> Factory::NewFunctionHelper(Handle<String> name, - Handle<Object> prototype) { - Handle<SharedFunctionInfo> function_share = NewSharedFunctionInfo(name); - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateFunction(*isolate()->sloppy_function_map(), - *function_share, - *prototype), - JSFunction); -} - - -Handle<JSFunction> Factory::NewFunction(Handle<String> name, - Handle<Object> prototype) { - Handle<JSFunction> fun = NewFunctionHelper(name, prototype); - fun->set_context(isolate()->context()->native_context()); - return fun; -} - - -Handle<JSFunction> Factory::NewFunctionWithoutPrototypeHelper( - Handle<String> name, - StrictMode strict_mode) { - Handle<SharedFunctionInfo> function_share = NewSharedFunctionInfo(name); - Handle<Map> map = strict_mode == SLOPPY - ? isolate()->sloppy_function_without_prototype_map() - : isolate()->strict_function_without_prototype_map(); - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->AllocateFunction( - *map, - *function_share, - *the_hole_value()), - JSFunction); + Handle<Map> map = shared_function_info_map(); + Handle<SharedFunctionInfo> share = New<SharedFunctionInfo>(map, + OLD_POINTER_SPACE); + + // Set pointer fields. + share->set_name(*name); + Code* illegal = isolate()->builtins()->builtin(Builtins::kIllegal); + share->set_code(illegal); + share->set_optimized_code_map(Smi::FromInt(0)); + share->set_scope_info(ScopeInfo::Empty(isolate())); + Code* construct_stub = + isolate()->builtins()->builtin(Builtins::kJSConstructStubGeneric); + share->set_construct_stub(construct_stub); + share->set_instance_class_name(*Object_string()); + share->set_function_data(*undefined_value(), SKIP_WRITE_BARRIER); + share->set_script(*undefined_value(), SKIP_WRITE_BARRIER); + share->set_debug_info(*undefined_value(), SKIP_WRITE_BARRIER); + share->set_inferred_name(*empty_string(), SKIP_WRITE_BARRIER); + share->set_feedback_vector(*empty_fixed_array(), SKIP_WRITE_BARRIER); + share->set_initial_map(*undefined_value(), SKIP_WRITE_BARRIER); + share->set_profiler_ticks(0); + share->set_ast_node_count(0); + share->set_counters(0); + + // Set integer fields (smi or int, depending on the architecture). 
+ share->set_length(0); + share->set_formal_parameter_count(0); + share->set_expected_nof_properties(0); + share->set_num_literals(0); + share->set_start_position_and_type(0); + share->set_end_position(0); + share->set_function_token_position(0); + // All compiler hints default to false or 0. + share->set_compiler_hints(0); + share->set_opt_count_and_bailout_reason(0); + + return share; +} + + +static inline int NumberCacheHash(Handle<FixedArray> cache, + Handle<Object> number) { + int mask = (cache->length() >> 1) - 1; + if (number->IsSmi()) { + return Handle<Smi>::cast(number)->value() & mask; + } else { + DoubleRepresentation rep(number->Number()); + return + (static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32)) & mask; + } } -Handle<JSFunction> Factory::NewFunctionWithoutPrototype( - Handle<String> name, - StrictMode strict_mode) { - Handle<JSFunction> fun = NewFunctionWithoutPrototypeHelper(name, strict_mode); - fun->set_context(isolate()->context()->native_context()); - return fun; +Handle<Object> Factory::GetNumberStringCache(Handle<Object> number) { + DisallowHeapAllocation no_gc; + int hash = NumberCacheHash(number_string_cache(), number); + Object* key = number_string_cache()->get(hash * 2); + if (key == *number || (key->IsHeapNumber() && number->IsHeapNumber() && + key->Number() == number->Number())) { + return Handle<String>( + String::cast(number_string_cache()->get(hash * 2 + 1)), isolate()); + } + return undefined_value(); +} + + +void Factory::SetNumberStringCache(Handle<Object> number, + Handle<String> string) { + int hash = NumberCacheHash(number_string_cache(), number); + if (number_string_cache()->get(hash * 2) != *undefined_value()) { + int full_size = isolate()->heap()->FullSizeNumberStringCacheLength(); + if (number_string_cache()->length() != full_size) { + // The first time we have a hash collision, we move to the full sized + // number string cache. The idea is to have a small number string + // cache in the snapshot to keep boot-time memory usage down. + // If we expand the number string cache already while creating + // the snapshot then that didn't work out. + ASSERT(!Serializer::enabled(isolate()) || FLAG_extra_code != NULL); + Handle<FixedArray> new_cache = NewFixedArray(full_size, TENURED); + isolate()->heap()->set_number_string_cache(*new_cache); + return; + } + } + number_string_cache()->set(hash * 2, *number); + number_string_cache()->set(hash * 2 + 1, *string); } -Handle<Object> Factory::ToObject(Handle<Object> object) { - CALL_HEAP_FUNCTION(isolate(), object->ToObject(isolate()), Object); -} +Handle<String> Factory::NumberToString(Handle<Object> number, + bool check_number_string_cache) { + isolate()->counters()->number_to_string_runtime()->Increment(); + if (check_number_string_cache) { + Handle<Object> cached = GetNumberStringCache(number); + if (!cached->IsUndefined()) return Handle<String>::cast(cached); + } + char arr[100]; + Vector<char> buffer(arr, ARRAY_SIZE(arr)); + const char* str; + if (number->IsSmi()) { + int num = Handle<Smi>::cast(number)->value(); + str = IntToCString(num, buffer); + } else { + double num = Handle<HeapNumber>::cast(number)->value(); + str = DoubleToCString(num, buffer); + } -Handle<Object> Factory::ToObject(Handle<Object> object, - Handle<Context> native_context) { - CALL_HEAP_FUNCTION(isolate(), object->ToObject(*native_context), Object); + // We tenure the allocated string since it is referenced from the + // number-string cache which lives in the old space. 
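GetNumberStringCache and SetNumberStringCache above key a fixed-size FixedArray by a hash of the number: smis hash by value, doubles by XOR-ing the two 32-bit halves of their bit pattern, and each entry takes two adjacent slots (the number at 2 * hash, its string at 2 * hash + 1). A standalone sketch of that hashing scheme; the cache length of 256 and the function signature are illustrative, not V8's.

  #include <cstdint>
  #include <cstdio>
  #include <cstring>

  // Mirrors NumberCacheHash: the cache stores (number, string) pairs, so the
  // mask comes from half the backing array's length, a power of two.
  int NumberCacheHash(int cache_length, bool is_smi, int smi_value, double d) {
    int mask = (cache_length >> 1) - 1;
    if (is_smi) return smi_value & mask;
    std::uint64_t bits;
    std::memcpy(&bits, &d, sizeof bits);  // reinterpret the double's bit pattern
    return (static_cast<int>(bits) ^ static_cast<int>(bits >> 32)) & mask;
  }

  int main() {
    const int kCacheLength = 256;  // illustrative size; the mask is then 127
    int smi_slot = 2 * NumberCacheHash(kCacheLength, true, 42, 0.0);
    int dbl_slot = 2 * NumberCacheHash(kCacheLength, false, 0, 0.5);
    std::printf("smi 42 stored at [%d], its string at [%d]\n",
                smi_slot, smi_slot + 1);
    std::printf("0.5 stored at [%d], its string at [%d]\n",
                dbl_slot, dbl_slot + 1);
    return 0;
  }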
+ Handle<String> js_string = NewStringFromAsciiChecked(str, TENURED); + SetNumberStringCache(number, js_string); + return js_string; } -#ifdef ENABLE_DEBUGGER_SUPPORT Handle<DebugInfo> Factory::NewDebugInfo(Handle<SharedFunctionInfo> shared) { // Get the original code of the function. Handle<Code> code(shared->code()); @@ -1734,7 +2079,6 @@ Handle<DebugInfo> Factory::NewDebugInfo(Handle<SharedFunctionInfo> shared) { return debug_info; } -#endif Handle<JSObject> Factory::NewArgumentsObject(Handle<Object> callee, @@ -1746,7 +2090,9 @@ Handle<JSObject> Factory::NewArgumentsObject(Handle<Object> callee, Handle<JSFunction> Factory::CreateApiFunction( - Handle<FunctionTemplateInfo> obj, ApiInstanceType instance_type) { + Handle<FunctionTemplateInfo> obj, + Handle<Object> prototype, + ApiInstanceType instance_type) { Handle<Code> code = isolate()->builtins()->HandleApiCall(); Handle<Code> construct_stub = isolate()->builtins()->JSConstructStubApi(); @@ -1782,24 +2128,33 @@ Handle<JSFunction> Factory::CreateApiFunction( break; } - Handle<JSFunction> result = - NewFunction(Factory::empty_string(), - type, - instance_size, - code, - true); + MaybeHandle<Object> maybe_prototype = prototype; + if (obj->remove_prototype()) maybe_prototype = MaybeHandle<Object>(); - // Set length. - result->shared()->set_length(obj->length()); + Handle<JSFunction> result = NewFunction( + maybe_prototype, Factory::empty_string(), type, + instance_size, code, !obj->remove_prototype()); - // Set class name. - Handle<Object> class_name = Handle<Object>(obj->class_name(), isolate()); + result->shared()->set_length(obj->length()); + Handle<Object> class_name(obj->class_name(), isolate()); if (class_name->IsString()) { result->shared()->set_instance_class_name(*class_name); result->shared()->set_name(*class_name); } + result->shared()->set_function_data(*obj); + result->shared()->set_construct_stub(*construct_stub); + result->shared()->DontAdaptArguments(); + + if (obj->remove_prototype()) { + ASSERT(result->shared()->IsApiFunction()); + ASSERT(!result->has_initial_map()); + ASSERT(!result->has_prototype()); + return result; + } + // Down from here is only valid for API functions that can be used as a + // constructor (don't set the "remove prototype" flag). - Handle<Map> map = Handle<Map>(result->initial_map()); + Handle<Map> map(result->initial_map()); // Mark as undetectable if needed. if (obj->undetectable()) { @@ -1829,10 +2184,6 @@ Handle<JSFunction> Factory::CreateApiFunction( map->set_has_instance_call_handler(); } - result->shared()->set_function_data(*obj); - result->shared()->set_construct_stub(*construct_stub); - result->shared()->DontAdaptArguments(); - // Recursively copy parent instance templates' accessors, // 'data' may be modified. 
int max_number_of_additional_properties = 0; @@ -1899,7 +2250,7 @@ Handle<JSFunction> Factory::CreateApiFunction( // Install accumulated static accessors for (int i = 0; i < valid_descriptors; i++) { Handle<AccessorInfo> accessor(AccessorInfo::cast(array->get(i))); - JSObject::SetAccessor(result, accessor); + JSObject::SetAccessor(result, accessor).Assert(); } ASSERT(result->shared()->IsApiFunction()); @@ -1907,32 +2258,13 @@ Handle<JSFunction> Factory::CreateApiFunction( } -Handle<MapCache> Factory::NewMapCache(int at_least_space_for) { - CALL_HEAP_FUNCTION(isolate(), - MapCache::Allocate(isolate()->heap(), - at_least_space_for), - MapCache); -} - - -MUST_USE_RESULT static MaybeObject* UpdateMapCacheWith(Context* context, - FixedArray* keys, - Map* map) { - Object* result; - { MaybeObject* maybe_result = - MapCache::cast(context->map_cache())->Put(keys, map); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - context->set_map_cache(MapCache::cast(result)); - return result; -} - - Handle<MapCache> Factory::AddToMapCache(Handle<Context> context, Handle<FixedArray> keys, Handle<Map> map) { - CALL_HEAP_FUNCTION(isolate(), - UpdateMapCacheWith(*context, *keys, *map), MapCache); + Handle<MapCache> map_cache = handle(MapCache::cast(context->map_cache())); + Handle<MapCache> result = MapCache::Put(map_cache, keys, map); + context->set_map_cache(*result); + return result; } @@ -1940,7 +2272,7 @@ Handle<Map> Factory::ObjectLiteralMapFromCache(Handle<Context> context, Handle<FixedArray> keys) { if (context->map_cache()->IsUndefined()) { // Allocate the new map cache for the native context. - Handle<MapCache> new_cache = NewMapCache(24); + Handle<MapCache> new_cache = MapCache::New(isolate(), 24); context->set_map_cache(*new_cache); } // Check to see whether there is a matching element in the cache. @@ -1949,11 +2281,10 @@ Handle<Map> Factory::ObjectLiteralMapFromCache(Handle<Context> context, Handle<Object> result = Handle<Object>(cache->Lookup(*keys), isolate()); if (result->IsMap()) return Handle<Map>::cast(result); // Create a new map and add it to the cache. - Handle<Map> map = - CopyMap(Handle<Map>(context->object_function()->initial_map()), - keys->length()); + Handle<Map> map = Map::Create( + handle(context->object_function()), keys->length()); AddToMapCache(context, keys, map); - return Handle<Map>(map); + return map; } @@ -1993,28 +2324,25 @@ void Factory::SetRegExpIrregexpData(Handle<JSRegExp> regexp, -void Factory::ConfigureInstance(Handle<FunctionTemplateInfo> desc, - Handle<JSObject> instance, - bool* pending_exception) { +MaybeHandle<FunctionTemplateInfo> Factory::ConfigureInstance( + Handle<FunctionTemplateInfo> desc, Handle<JSObject> instance) { // Configure the instance by adding the properties specified by the // instance template. 
Handle<Object> instance_template(desc->instance_template(), isolate()); if (!instance_template->IsUndefined()) { - Execution::ConfigureInstance(isolate(), - instance, - instance_template, - pending_exception); - } else { - *pending_exception = false; + RETURN_ON_EXCEPTION( + isolate(), + Execution::ConfigureInstance(isolate(), instance, instance_template), + FunctionTemplateInfo); } + return desc; } Handle<Object> Factory::GlobalConstantFor(Handle<String> name) { - Heap* h = isolate()->heap(); - if (name->Equals(h->undefined_string())) return undefined_value(); - if (name->Equals(h->nan_string())) return nan_value(); - if (name->Equals(h->infinity_string())) return infinity_value(); + if (String::Equals(name, undefined_string())) return undefined_value(); + if (String::Equals(name, nan_string())) return nan_value(); + if (String::Equals(name, infinity_string())) return infinity_value(); return Handle<Object>::null(); } diff --git a/deps/v8/src/factory.h b/deps/v8/src/factory.h index 00f20ff8b..91a036cca 100644 --- a/deps/v8/src/factory.h +++ b/deps/v8/src/factory.h @@ -1,48 +1,23 @@ -// Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_FACTORY_H_ #define V8_FACTORY_H_ -#include "globals.h" -#include "handles.h" -#include "heap.h" +#include "isolate.h" namespace v8 { namespace internal { // Interface for handle based allocation. -class Factory { +class Factory V8_FINAL { public: - // Allocate a new boxed value. - Handle<Box> NewBox( - Handle<Object> value, - PretenureFlag pretenure = NOT_TENURED); + Handle<Oddball> NewOddball(Handle<Map> map, + const char* to_string, + Handle<Object> to_number, + byte kind); // Allocates a fixed array initialized with undefined values. 
Handle<FixedArray> NewFixedArray( @@ -58,7 +33,14 @@ class Factory { Handle<FixedArray> NewUninitializedFixedArray(int size); // Allocate a new uninitialized fixed double array. - Handle<FixedDoubleArray> NewFixedDoubleArray( + // The function returns a pre-allocated empty fixed array for capacity = 0, + // so the return type must be the general fixed array class. + Handle<FixedArrayBase> NewFixedDoubleArray( + int size, + PretenureFlag pretenure = NOT_TENURED); + + // Allocate a new fixed double array with hole values. + Handle<FixedArrayBase> NewFixedDoubleArrayWithHoles( int size, PretenureFlag pretenure = NOT_TENURED); @@ -68,35 +50,20 @@ class Factory { int number_of_heap_ptr_entries, int number_of_int32_entries); - Handle<SeededNumberDictionary> NewSeededNumberDictionary( - int at_least_space_for); - - Handle<UnseededNumberDictionary> NewUnseededNumberDictionary( - int at_least_space_for); - - Handle<NameDictionary> NewNameDictionary(int at_least_space_for); + Handle<OrderedHashSet> NewOrderedHashSet(); + Handle<OrderedHashMap> NewOrderedHashMap(); - Handle<ObjectHashSet> NewObjectHashSet(int at_least_space_for); + // Create a new boxed value. + Handle<Box> NewBox(Handle<Object> value); - Handle<ObjectHashTable> NewObjectHashTable( - int at_least_space_for, - MinimumCapacity capacity_option = USE_DEFAULT_MINIMUM_CAPACITY); - - Handle<WeakHashTable> NewWeakHashTable(int at_least_space_for); - - Handle<DescriptorArray> NewDescriptorArray(int number_of_descriptors, - int slack = 0); - Handle<DeoptimizationInputData> NewDeoptimizationInputData( - int deopt_entry_count, - PretenureFlag pretenure); - Handle<DeoptimizationOutputData> NewDeoptimizationOutputData( - int deopt_entry_count, - PretenureFlag pretenure); - // Allocates a pre-tenured empty AccessorPair. + // Create a pre-tenured empty AccessorPair. Handle<AccessorPair> NewAccessorPair(); + // Create an empty TypeFeedbackInfo. Handle<TypeFeedbackInfo> NewTypeFeedbackInfo(); + // Finds the internalized copy for string in the string table. + // If not found, a new string is added to the table and returned. Handle<String> InternalizeUtf8String(Vector<const char> str); Handle<String> InternalizeUtf8String(const char* str) { return InternalizeUtf8String(CStrVector(str)); @@ -134,11 +101,28 @@ class Factory { // two byte. // // ASCII strings are pretenured when used as keys in the SourceCodeCache. - Handle<String> NewStringFromOneByte( + MUST_USE_RESULT MaybeHandle<String> NewStringFromOneByte( Vector<const uint8_t> str, PretenureFlag pretenure = NOT_TENURED); + + template<size_t N> + inline Handle<String> NewStringFromStaticAscii( + const char (&str)[N], + PretenureFlag pretenure = NOT_TENURED) { + ASSERT(N == StrLength(str) + 1); + return NewStringFromOneByte( + STATIC_ASCII_VECTOR(str), pretenure).ToHandleChecked(); + } + + inline Handle<String> NewStringFromAsciiChecked( + const char* str, + PretenureFlag pretenure = NOT_TENURED) { + return NewStringFromOneByte( + OneByteVector(str), pretenure).ToHandleChecked(); + } + // TODO(dcarney): remove this function. - inline Handle<String> NewStringFromAscii( + MUST_USE_RESULT inline MaybeHandle<String> NewStringFromAscii( Vector<const char> str, PretenureFlag pretenure = NOT_TENURED) { return NewStringFromOneByte(Vector<const uint8_t>::cast(str), pretenure); @@ -146,29 +130,54 @@ class Factory { // UTF8 strings are pretenured when used for regexp literal patterns and // flags in the parser. 
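NewStringFromStaticAscii above deduces the template parameter N from the string literal's array type, so the length is known at compile time and can be checked against StrLength(str) + 1, the characters plus the trailing NUL. The same deduction trick in isolation, with assert standing in for V8's ASSERT and PrintStaticAscii as a made-up consumer:

  #include <cassert>
  #include <cstddef>
  #include <cstdio>
  #include <cstring>

  // N is deduced from the literal's array type, so "factory" gives N == 8:
  // seven characters plus the terminating NUL.
  template <std::size_t N>
  void PrintStaticAscii(const char (&str)[N]) {
    // Guards against arrays whose first NUL is not the final element, the
    // same check as ASSERT(N == StrLength(str) + 1) above.
    assert(N == std::strlen(str) + 1);
    std::printf("%zu chars: %s\n", N - 1, str);
  }

  int main() {
    PrintStaticAscii("factory");   // N deduced as 8 at compile time
    // PrintStaticAscii(ptr);      // a plain const char* would not compile
    return 0;
  }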
- Handle<String> NewStringFromUtf8( + MUST_USE_RESULT MaybeHandle<String> NewStringFromUtf8( Vector<const char> str, PretenureFlag pretenure = NOT_TENURED); - Handle<String> NewStringFromTwoByte( + MUST_USE_RESULT MaybeHandle<String> NewStringFromTwoByte( Vector<const uc16> str, PretenureFlag pretenure = NOT_TENURED); + // Allocates an internalized string in old space based on the character + // stream. + MUST_USE_RESULT Handle<String> NewInternalizedStringFromUtf8( + Vector<const char> str, + int chars, + uint32_t hash_field); + + MUST_USE_RESULT Handle<String> NewOneByteInternalizedString( + Vector<const uint8_t> str, + uint32_t hash_field); + + MUST_USE_RESULT Handle<String> NewTwoByteInternalizedString( + Vector<const uc16> str, + uint32_t hash_field); + + MUST_USE_RESULT Handle<String> NewInternalizedStringImpl( + Handle<String> string, int chars, uint32_t hash_field); + + // Compute the matching internalized string map for a string if possible. + // Empty handle is returned if string is in new space or not flattened. + MUST_USE_RESULT MaybeHandle<Map> InternalizedStringMapForString( + Handle<String> string); + // Allocates and partially initializes an ASCII or TwoByte String. The // characters of the string are uninitialized. Currently used in regexp code // only, where they are pretenured. - Handle<SeqOneByteString> NewRawOneByteString( + MUST_USE_RESULT MaybeHandle<SeqOneByteString> NewRawOneByteString( int length, PretenureFlag pretenure = NOT_TENURED); - Handle<SeqTwoByteString> NewRawTwoByteString( + MUST_USE_RESULT MaybeHandle<SeqTwoByteString> NewRawTwoByteString( int length, PretenureFlag pretenure = NOT_TENURED); - // Create a new cons string object which consists of a pair of strings. - Handle<String> NewConsString(Handle<String> left, - Handle<String> right); + // Creates a single character string where the character has given code. + // A cache is used for ASCII codes. + Handle<String> LookupSingleCharacterStringFromCode(uint32_t code); - Handle<ConsString> NewRawConsString(String::Encoding encoding); + // Create a new cons string object which consists of a pair of strings. + MUST_USE_RESULT MaybeHandle<String> NewConsString(Handle<String> left, + Handle<String> right); // Create a new sequential string containing the concatenation of the inputs. Handle<String> NewFlatConcatString(Handle<String> first, @@ -185,15 +194,14 @@ class Factory { return NewProperSubString(str, begin, end); } - Handle<SlicedString> NewRawSlicedString(String::Encoding encoding); - // Creates a new external String object. There are two String encodings // in the system: ASCII and two byte. Unlike other String types, it does // not make sense to have a UTF-8 factory function for external strings, - // because we cannot change the underlying buffer. - Handle<String> NewExternalStringFromAscii( + // because we cannot change the underlying buffer. Note that these strings + // are backed by a string resource that resides outside the V8 heap. + MUST_USE_RESULT MaybeHandle<String> NewExternalStringFromAscii( const ExternalAsciiString::Resource* resource); - Handle<String> NewExternalStringFromTwoByte( + MUST_USE_RESULT MaybeHandle<String> NewExternalStringFromTwoByte( const ExternalTwoByteString::Resource* resource); // Create a symbol. @@ -222,7 +230,7 @@ class Factory { // Create a 'with' context. Handle<Context> NewWithContext(Handle<JSFunction> function, Handle<Context> previous, - Handle<JSObject> extension); + Handle<JSReceiver> extension); // Create a block context. 
Handle<Context> NewBlockContext(Handle<JSFunction> function, @@ -233,6 +241,8 @@ class Factory { // the old generation). Handle<Struct> NewStruct(InstanceType type); + Handle<CodeCache> NewCodeCache(); + Handle<AliasedArgumentsEntry> NewAliasedArgumentsEntry( int aliased_context_slot); @@ -272,6 +282,7 @@ class Factory { Handle<PropertyCell> NewPropertyCell(Handle<Object> value); + // Allocate a tenured AllocationSite. It's payload is null. Handle<AllocationSite> NewAllocationSite(); Handle<Map> NewMap( @@ -279,14 +290,19 @@ class Factory { int instance_size, ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND); + Handle<HeapObject> NewFillerObject(int size, + bool double_align, + AllocationSpace space); + Handle<JSObject> NewFunctionPrototype(Handle<JSFunction> function); - Handle<Map> CopyWithPreallocatedFieldDescriptors(Handle<Map> map); + Handle<JSObject> CopyJSObject(Handle<JSObject> object); + + Handle<JSObject> CopyJSObjectWithAllocationSite(Handle<JSObject> object, + Handle<AllocationSite> site); - // Copy the map adding more inobject properties if possible without - // overflowing the instance size. - Handle<Map> CopyMap(Handle<Map> map, int extra_inobject_props); - Handle<Map> CopyMap(Handle<Map> map); + Handle<FixedArray> CopyFixedArrayWithMap(Handle<FixedArray> array, + Handle<Map> map); Handle<FixedArray> CopyFixedArray(Handle<FixedArray> array); @@ -294,10 +310,6 @@ class Factory { // of it in old space. Handle<FixedArray> CopyAndTenureFixedCOWArray(Handle<FixedArray> array); - Handle<FixedArray> CopySizeFixedArray(Handle<FixedArray> array, - int new_length, - PretenureFlag pretenure = NOT_TENURED); - Handle<FixedDoubleArray> CopyFixedDoubleArray( Handle<FixedDoubleArray> array); @@ -305,6 +317,7 @@ class Factory { Handle<ConstantPoolArray> array); // Numbers (e.g. literals) are pretenured by the parser. + // The return value may be a smi or a heap number. Handle<Object> NewNumber(double value, PretenureFlag pretenure = NOT_TENURED); @@ -312,15 +325,23 @@ class Factory { PretenureFlag pretenure = NOT_TENURED); Handle<Object> NewNumberFromUint(uint32_t value, PretenureFlag pretenure = NOT_TENURED); - inline Handle<Object> NewNumberFromSize(size_t value, - PretenureFlag pretenure = NOT_TENURED); + Handle<Object> NewNumberFromSize(size_t value, + PretenureFlag pretenure = NOT_TENURED) { + if (Smi::IsValid(static_cast<intptr_t>(value))) { + return Handle<Object>(Smi::FromIntptr(static_cast<intptr_t>(value)), + isolate()); + } + return NewNumber(static_cast<double>(value), pretenure); + } Handle<HeapNumber> NewHeapNumber(double value, PretenureFlag pretenure = NOT_TENURED); // These objects are used by the api to create env-independent data // structures in the heap. - Handle<JSObject> NewNeanderObject(); + inline Handle<JSObject> NewNeanderObject() { + return NewJSObjectFromMap(neander_map()); + } Handle<JSObject> NewArgumentsObject(Handle<Object> callee, int length); @@ -343,14 +364,19 @@ class Factory { bool allocate_properties = true, Handle<AllocationSite> allocation_site = Handle<AllocationSite>::null()); - Handle<JSObject> NewJSObjectFromMapForDeoptimizer( - Handle<Map> map, PretenureFlag pretenure = NOT_TENURED); - // JS modules are pretenured. Handle<JSModule> NewJSModule(Handle<Context> context, Handle<ScopeInfo> scope_info); // JS arrays are pretenured when allocated by the parser. + + // Create a JSArray with no elements. 
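NewNumber and NewNumberFromSize, declared above and implemented earlier in factory.cc, choose between a smi and a heap number: a double becomes a smi only if it is not minus zero, lies in the smi range, and round-trips through an integer. A standalone sketch of that decision; the 31-bit range below is the 32-bit-architecture case and is an assumption of the sketch, as is the FitsSmi name.

  #include <cmath>
  #include <cstdint>
  #include <cstdio>

  // Assumed smi range for 32-bit tagged values: one tag bit, 31 payload bits.
  // 64-bit V8 builds use a wider range; this sketch fixes the narrow one.
  const double kSmiMin = -1073741824.0;  // -(2^30)
  const double kSmiMax = 1073741823.0;   //  2^30 - 1

  // True when the double can be stored as an immediate small integer rather
  // than a heap-allocated number.
  bool FitsSmi(double value) {
    if (value == 0.0 && std::signbit(value)) return false;  // -0.0 keeps its sign
    if (!(value >= kSmiMin && value <= kSmiMax)) return false;  // also rejects NaN
    std::int32_t as_int = static_cast<std::int32_t>(value);
    return static_cast<double>(as_int) == value;  // rejects fractions like 0.5
  }

  int main() {
    std::printf("42   -> %s\n", FitsSmi(42.0) ? "smi" : "heap number");
    std::printf("-0.0 -> %s\n", FitsSmi(-0.0) ? "smi" : "heap number");
    std::printf("0.5  -> %s\n", FitsSmi(0.5) ? "smi" : "heap number");
    std::printf("2^40 -> %s\n", FitsSmi(1099511627776.0) ? "smi" : "heap number");
    return 0;
  }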
+ Handle<JSArray> NewJSArray( + ElementsKind elements_kind, + PretenureFlag pretenure = NOT_TENURED); + + // Create a JSArray with a specified length and elements initialized + // according to the specified mode. Handle<JSArray> NewJSArray( ElementsKind elements_kind, int length, @@ -362,10 +388,14 @@ class Factory { int capacity, ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND, PretenureFlag pretenure = NOT_TENURED) { + if (capacity != 0) { + elements_kind = GetHoleyElementsKind(elements_kind); + } return NewJSArray(elements_kind, 0, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE, pretenure); } + // Create a JSArray with the given elements. Handle<JSArray> NewJSArrayWithElements( Handle<FixedArrayBase> elements, ElementsKind elements_kind, @@ -394,35 +424,74 @@ class Factory { Handle<JSDataView> NewJSDataView(); + // Allocates a Harmony proxy. Handle<JSProxy> NewJSProxy(Handle<Object> handler, Handle<Object> prototype); + // Allocates a Harmony function proxy. + Handle<JSProxy> NewJSFunctionProxy(Handle<Object> handler, + Handle<Object> call_trap, + Handle<Object> construct_trap, + Handle<Object> prototype); + + // Reinitialize a JSReceiver into an (empty) JS object of respective type and + // size, but keeping the original prototype. The receiver must have at least + // the size of the new object. The object is reinitialized and behaves as an + // object that has been freshly allocated. + void ReinitializeJSReceiver( + Handle<JSReceiver> object, InstanceType type, int size); + + // Reinitialize an JSGlobalProxy based on a constructor. The object + // must have the same size as objects allocated using the + // constructor. The object is reinitialized and behaves as an + // object that has been freshly allocated using the constructor. + void ReinitializeJSGlobalProxy(Handle<JSGlobalProxy> global, + Handle<JSFunction> constructor); + // Change the type of the argument into a JS object/function and reinitialize. void BecomeJSObject(Handle<JSReceiver> object); void BecomeJSFunction(Handle<JSReceiver> object); Handle<JSFunction> NewFunction(Handle<String> name, - Handle<Object> prototype); - - Handle<JSFunction> NewFunctionWithoutPrototype( - Handle<String> name, - StrictMode strict_mode); - - Handle<JSFunction> NewFunction(Handle<Object> super, bool is_global); + Handle<Code> code, + MaybeHandle<Object> maybe_prototype = + MaybeHandle<Object>()); - Handle<JSFunction> BaseNewFunctionFromSharedFunctionInfo( - Handle<SharedFunctionInfo> function_info, - Handle<Map> function_map, - PretenureFlag pretenure); + Handle<JSFunction> NewFunctionWithPrototype(Handle<String> name, + Handle<Object> prototype); Handle<JSFunction> NewFunctionFromSharedFunctionInfo( Handle<SharedFunctionInfo> function_info, Handle<Context> context, PretenureFlag pretenure = TENURED); + Handle<JSFunction> NewFunction(MaybeHandle<Object> maybe_prototype, + Handle<String> name, + InstanceType type, + int instance_size, + Handle<Code> code, + bool force_initial_map); + Handle<JSFunction> NewFunction(Handle<String> name, + InstanceType type, + int instance_size, + Handle<Code> code, + bool force_initial_map); + + Handle<JSFunction> NewFunctionWithPrototype(Handle<String> name, + InstanceType type, + int instance_size, + Handle<JSObject> prototype, + Handle<Code> code, + bool force_initial_map); + + // Create a serialized scope info. Handle<ScopeInfo> NewScopeInfo(int length); + // Create an External object for V8's external API. 
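A brief aside, not part of the patch: the NewFunction family above now takes the Code object up front and turns the prototype into an optional MaybeHandle<Object>. Assuming pre-existing handles named name and code, a prototype-less function would be created roughly like this:

    // Illustrative only: maybe_prototype defaults to an empty MaybeHandle.
    Handle<JSFunction> fun = factory->NewFunction(name, code);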
Handle<JSObject> NewExternal(void* value); + // The reference to the Code object is stored in self_reference. + // This allows generated code to reference its own Code object + // by containing this handle. Handle<Code> NewCode(const CodeDesc& desc, Code::Flags flags, Handle<Object> self_reference, @@ -434,10 +503,6 @@ class Factory { Handle<Code> CopyCode(Handle<Code> code, Vector<byte> reloc_info); - Handle<Object> ToObject(Handle<Object> object); - Handle<Object> ToObject(Handle<Object> object, - Handle<Context> native_context); - // Interface for creating error objects. Handle<Object> NewError(const char* maker, const char* message, @@ -459,6 +524,11 @@ class Factory { Vector< Handle<Object> > args); Handle<Object> NewRangeError(Handle<String> message); + Handle<Object> NewInvalidStringLengthError() { + return NewRangeError("invalid_string_length", + HandleVector<Object>(NULL, 0)); + } + Handle<Object> NewSyntaxError(const char* message, Handle<JSArray> args); Handle<Object> NewSyntaxError(Handle<String> message); @@ -470,29 +540,14 @@ class Factory { Handle<Object> NewEvalError(const char* message, Vector< Handle<Object> > args); + Handle<JSObject> NewIteratorResultObject(Handle<Object> value, bool done); - Handle<JSFunction> NewFunction(Handle<String> name, - InstanceType type, - int instance_size, - Handle<Code> code, - bool force_initial_map); + Handle<String> NumberToString(Handle<Object> number, + bool check_number_string_cache = true); - Handle<JSFunction> NewFunction(Handle<Map> function_map, - Handle<SharedFunctionInfo> shared, Handle<Object> prototype); - - - Handle<JSFunction> NewFunctionWithPrototype(Handle<String> name, - InstanceType type, - int instance_size, - Handle<JSObject> prototype, - Handle<Code> code, - bool force_initial_map); - - Handle<JSFunction> NewFunctionWithoutPrototype(Handle<String> name, - Handle<Code> code); - - Handle<String> NumberToString(Handle<Object> number); - Handle<String> Uint32ToString(uint32_t value); + Handle<String> Uint32ToString(uint32_t value) { + return NumberToString(NewNumberFromUint(value)); + } enum ApiInstanceType { JavaScriptObject, @@ -502,6 +557,7 @@ class Factory { Handle<JSFunction> CreateApiFunction( Handle<FunctionTemplateInfo> data, + Handle<Object> prototype, ApiInstanceType type = JavaScriptObject); Handle<JSFunction> InstallMembers(Handle<JSFunction> function); @@ -509,9 +565,8 @@ class Factory { // Installs interceptors on the instance. 'desc' is a function template, // and instance is an object instance created by the function of this // function template. - void ConfigureInstance(Handle<FunctionTemplateInfo> desc, - Handle<JSObject> instance, - bool* pending_exception); + MUST_USE_RESULT MaybeHandle<FunctionTemplateInfo> ConfigureInstance( + Handle<FunctionTemplateInfo> desc, Handle<JSObject> instance); #define ROOT_ACCESSOR(type, name, camel_name) \ inline Handle<type> name() { \ @@ -537,18 +592,28 @@ class Factory { INTERNALIZED_STRING_LIST(STRING_ACCESSOR) #undef STRING_ACCESSOR + inline void set_string_table(Handle<StringTable> table) { + isolate()->heap()->set_string_table(*table); + } + Handle<String> hidden_string() { return Handle<String>(&isolate()->heap()->hidden_string_); } + // Allocates a new SharedFunctionInfo object. 
Handle<SharedFunctionInfo> NewSharedFunctionInfo( Handle<String> name, int number_of_literals, bool is_generator, Handle<Code> code, - Handle<ScopeInfo> scope_info); + Handle<ScopeInfo> scope_info, + Handle<FixedArray> feedback_vector); Handle<SharedFunctionInfo> NewSharedFunctionInfo(Handle<String> name); + // Allocate a new type feedback vector + Handle<FixedArray> NewTypeFeedbackVector(int slot_count); + + // Allocates a new JSMessageObject object. Handle<JSMessageObject> NewJSMessageObject( Handle<String> type, Handle<JSArray> arguments, @@ -557,19 +622,7 @@ class Factory { Handle<Object> script, Handle<Object> stack_frames); - Handle<SeededNumberDictionary> DictionaryAtNumberPut( - Handle<SeededNumberDictionary>, - uint32_t key, - Handle<Object> value); - - Handle<UnseededNumberDictionary> DictionaryAtNumberPut( - Handle<UnseededNumberDictionary>, - uint32_t key, - Handle<Object> value); - -#ifdef ENABLE_DEBUGGER_SUPPORT Handle<DebugInfo> NewDebugInfo(Handle<SharedFunctionInfo> shared); -#endif // Return a map using the map cache in the native context. // The key the an ordered set of property names. @@ -603,12 +656,33 @@ class Factory { private: Isolate* isolate() { return reinterpret_cast<Isolate*>(this); } - Handle<JSFunction> NewFunctionHelper(Handle<String> name, - Handle<Object> prototype); - - Handle<JSFunction> NewFunctionWithoutPrototypeHelper( - Handle<String> name, - StrictMode strict_mode); + // Creates a heap object based on the map. The fields of the heap object are + // not initialized by New<>() functions. It's the responsibility of the caller + // to do that. + template<typename T> + Handle<T> New(Handle<Map> map, AllocationSpace space); + + template<typename T> + Handle<T> New(Handle<Map> map, + AllocationSpace space, + Handle<AllocationSite> allocation_site); + + // Creates a code object that is not yet fully initialized yet. + inline Handle<Code> NewCodeRaw(int object_size, bool immovable); + + // Initializes a function with a shared part and prototype. + // Note: this code was factored out of NewFunction such that other parts of + // the VM could use it. Specifically, a function that creates instances of + // type JS_FUNCTION_TYPE benefit from the use of this function. + inline void InitializeFunction(Handle<JSFunction> function, + Handle<SharedFunctionInfo> info, + Handle<Context> context); + + // Creates a function initialized with a shared part. + inline Handle<JSFunction> NewFunction(Handle<Map> map, + Handle<SharedFunctionInfo> info, + Handle<Context> context, + PretenureFlag pretenure = TENURED); // Create a new map cache. Handle<MapCache> NewMapCache(int at_least_space_for); @@ -617,19 +691,14 @@ class Factory { Handle<MapCache> AddToMapCache(Handle<Context> context, Handle<FixedArray> keys, Handle<Map> map); -}; + // Attempt to find the number in a small cache. If we finds it, return + // the string representation of the number. Otherwise return undefined. + Handle<Object> GetNumberStringCache(Handle<Object> number); -Handle<Object> Factory::NewNumberFromSize(size_t value, - PretenureFlag pretenure) { - if (Smi::IsValid(static_cast<intptr_t>(value))) { - return Handle<Object>(Smi::FromIntptr(static_cast<intptr_t>(value)), - isolate()); - } else { - return NewNumber(static_cast<double>(value), pretenure); - } -} - + // Update the cache with a new number-string pair. 
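As a hedged sketch of how the two new factory entry points above fit together (not from the patch; name, number_of_literals, is_generator, code, scope_info and slot_count are assumed to be in scope): the feedback vector is allocated first and then handed to NewSharedFunctionInfo, so it travels with the function rather than with the per-compilation TypeFeedbackInfo (see the matching full-codegen.cc hunks later in this diff).

    // Illustrative only: allocate the type feedback vector, then attach it
    // to the SharedFunctionInfo at creation time.
    Handle<FixedArray> vector = factory->NewTypeFeedbackVector(slot_count);
    Handle<SharedFunctionInfo> shared = factory->NewSharedFunctionInfo(
        name, number_of_literals, is_generator, code, scope_info, vector);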
+ void SetNumberStringCache(Handle<Object> number, Handle<String> string); +}; } } // namespace v8::internal diff --git a/deps/v8/src/fast-dtoa.cc b/deps/v8/src/fast-dtoa.cc index e62bd01fb..87ad2870d 100644 --- a/deps/v8/src/fast-dtoa.cc +++ b/deps/v8/src/fast-dtoa.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "../include/v8stdint.h" #include "checks.h" diff --git a/deps/v8/src/fast-dtoa.h b/deps/v8/src/fast-dtoa.h index ef2855793..d96c296f1 100644 --- a/deps/v8/src/fast-dtoa.h +++ b/deps/v8/src/fast-dtoa.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_FAST_DTOA_H_ #define V8_FAST_DTOA_H_ diff --git a/deps/v8/src/feedback-slots.h b/deps/v8/src/feedback-slots.h index 9760c652b..bc33a4607 100644 --- a/deps/v8/src/feedback-slots.h +++ b/deps/v8/src/feedback-slots.h @@ -1,29 +1,6 @@ // Copyright 2014 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_FEEDBACK_SLOTS_H_ #define V8_FEEDBACK_SLOTS_H_ @@ -35,76 +12,16 @@ namespace v8 { namespace internal { -enum ComputablePhase { - DURING_PARSE, - AFTER_SCOPING -}; - - class FeedbackSlotInterface { public: static const int kInvalidFeedbackSlot = -1; virtual ~FeedbackSlotInterface() {} - // When can we ask how many feedback slots are necessary? 
- virtual ComputablePhase GetComputablePhase() = 0; - virtual int ComputeFeedbackSlotCount(Isolate* isolate) = 0; + virtual int ComputeFeedbackSlotCount() = 0; virtual void SetFirstFeedbackSlot(int slot) = 0; }; - -class DeferredFeedbackSlotProcessor { - public: - DeferredFeedbackSlotProcessor() - : slot_nodes_(NULL), - slot_count_(0) { } - - void add_slot_node(Zone* zone, FeedbackSlotInterface* slot) { - if (slot->GetComputablePhase() == DURING_PARSE) { - // No need to add to the list - int count = slot->ComputeFeedbackSlotCount(zone->isolate()); - slot->SetFirstFeedbackSlot(slot_count_); - slot_count_ += count; - } else { - if (slot_nodes_ == NULL) { - slot_nodes_ = new(zone) ZoneList<FeedbackSlotInterface*>(10, zone); - } - slot_nodes_->Add(slot, zone); - } - } - - void ProcessFeedbackSlots(Isolate* isolate) { - // Scope analysis must have been done. - if (slot_nodes_ == NULL) { - return; - } - - int current_slot = slot_count_; - for (int i = 0; i < slot_nodes_->length(); i++) { - FeedbackSlotInterface* slot_interface = slot_nodes_->at(i); - int count = slot_interface->ComputeFeedbackSlotCount(isolate); - if (count > 0) { - slot_interface->SetFirstFeedbackSlot(current_slot); - current_slot += count; - } - } - - slot_count_ = current_slot; - slot_nodes_->Clear(); - } - - int slot_count() { - ASSERT(slot_count_ >= 0); - return slot_count_; - } - - private: - ZoneList<FeedbackSlotInterface*>* slot_nodes_; - int slot_count_; -}; - - } } // namespace v8::internal #endif // V8_FEEDBACK_SLOTS_H_ diff --git a/deps/v8/src/fixed-dtoa.cc b/deps/v8/src/fixed-dtoa.cc index fd90eca90..014f8ab86 100644 --- a/deps/v8/src/fixed-dtoa.cc +++ b/deps/v8/src/fixed-dtoa.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
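An editorial sketch of the slimmed-down feedback-slots.h interface above (the class is invented for illustration and is not in the patch): with ComputablePhase and DeferredFeedbackSlotProcessor gone, an AST node now simply reports its slot count, without an Isolate, and records the first slot it is assigned.

    // Illustrative only: a node that needs exactly one feedback slot.
    class DemoCallNode : public FeedbackSlotInterface {
     public:
      DemoCallNode() : slot_(kInvalidFeedbackSlot) {}
      virtual int ComputeFeedbackSlotCount() { return 1; }
      virtual void SetFirstFeedbackSlot(int slot) { slot_ = slot; }
      int FeedbackSlot() const { return slot_; }
     private:
      int slot_;
    };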
#include <cmath> diff --git a/deps/v8/src/fixed-dtoa.h b/deps/v8/src/fixed-dtoa.h index 93f826fe8..b6495c11e 100644 --- a/deps/v8/src/fixed-dtoa.h +++ b/deps/v8/src/fixed-dtoa.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_FIXED_DTOA_H_ #define V8_FIXED_DTOA_H_ diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h index b93d03b59..30cbcd7bc 100644 --- a/deps/v8/src/flag-definitions.h +++ b/deps/v8/src/flag-definitions.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file defines all of the flags. It is separated into different section, // for Debug, Release, Logging and Profiling, etc. To add a new flag, find the @@ -185,6 +162,7 @@ DEFINE_bool(harmony_numeric_literals, false, DEFINE_bool(harmony_strings, false, "enable harmony string") DEFINE_bool(harmony_arrays, false, "enable harmony arrays") DEFINE_bool(harmony_maths, false, "enable harmony math functions") +DEFINE_bool(harmony_promises, true, "(dummy flag, has no effect)") DEFINE_bool(harmony, false, "enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) @@ -226,6 +204,9 @@ DEFINE_bool(track_computed_fields, true, "track computed boilerplate fields") DEFINE_implication(track_double_fields, track_fields) DEFINE_implication(track_heap_object_fields, track_fields) DEFINE_implication(track_computed_fields, track_fields) +DEFINE_bool(track_field_types, true, "track field types") +DEFINE_implication(track_field_types, track_fields) +DEFINE_implication(track_field_types, track_heap_object_fields) DEFINE_bool(smi_binop, true, "support smi representation in binary operations") // Flags for optimization types. 
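A short note on the flag hunks above, with a purely hypothetical call site (not from the patch): --track_field_types is new and, through DEFINE_implication, enabling it also forces track_fields and track_heap_object_fields, while --harmony_promises survives only as a dummy flag, presumably so existing command lines keep working. Inside V8 the DEFINE_bool macro expands to an ordinary FLAG_-prefixed global:

    // Illustrative only: code can branch on the generated global.
    if (FLAG_track_field_types) {
      // take the field-type-tracking path (hypothetical)
    }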
@@ -464,9 +445,6 @@ DEFINE_bool(trace_array_abuse, false, "trace out-of-bounds accesses to all arrays") DEFINE_implication(trace_array_abuse, trace_js_array_abuse) DEFINE_implication(trace_array_abuse, trace_external_array_abuse) -DEFINE_bool(debugger_auto_break, true, - "automatically set the debug break flag when debugger commands are " - "in the queue") DEFINE_bool(enable_liveedit, true, "enable liveedit experimental feature") DEFINE_bool(hard_abort, true, "abort by crashing") @@ -485,8 +463,10 @@ DEFINE_bool(always_inline_smi_code, false, "always inline smi code in non-opt code") // heap.cc -DEFINE_int(max_new_space_size, 0, "max size of the new generation (in kBytes)") -DEFINE_int(max_old_space_size, 0, "max size of the old generation (in Mbytes)") +DEFINE_int(max_new_space_size, 0, + "max size of the new space consisting of two semi-spaces which are half" + "the size (in MBytes)") +DEFINE_int(max_old_space_size, 0, "max size of the old space (in Mbytes)") DEFINE_int(max_executable_size, 0, "max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false, "always perform global GCs") DEFINE_int(gc_interval, -1, "garbage collect after <n> allocations") @@ -511,6 +491,8 @@ DEFINE_bool(trace_external_memory, false, "it is adjusted.") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") +DEFINE_bool(weak_embedded_maps_in_ic, true, + "make maps embedded in inline cache stubs") DEFINE_bool(weak_embedded_maps_in_optimized_code, true, "make maps embedded in optimized code weak") DEFINE_bool(weak_embedded_objects_in_optimized_code, true, @@ -529,8 +511,8 @@ DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(track_gc_object_stats, false, "track object counts and memory usage") -DEFINE_bool(parallel_sweeping, true, "enable parallel sweeping") -DEFINE_bool(concurrent_sweeping, false, "enable concurrent sweeping") +DEFINE_bool(parallel_sweeping, false, "enable parallel sweeping") +DEFINE_bool(concurrent_sweeping, true, "enable concurrent sweeping") DEFINE_int(sweeper_threads, 0, "number of parallel and concurrent sweeping threads") DEFINE_bool(job_based_sweeping, false, "enable job based sweeping") @@ -556,8 +538,6 @@ DEFINE_bool(native_code_counters, false, // mark-compact.cc DEFINE_bool(always_compact, false, "Perform compaction on every full GC") -DEFINE_bool(lazy_sweeping, true, - "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true, @@ -644,7 +624,13 @@ DEFINE_string(testing_serialization_file, "/tmp/serdes", // mksnapshot.cc DEFINE_string(extra_code, NULL, "A filename with extra code to be included in" - " the snapshot (mksnapshot only)") + " the snapshot (mksnapshot only)") +DEFINE_string(raw_file, NULL, "A file to write the raw snapshot bytes to. " + "(mksnapshot only)") +DEFINE_string(raw_context_file, NULL, "A file to write the raw context " + "snapshot bytes to. (mksnapshot only)") +DEFINE_bool(omit, false, "Omit raw snapshot bytes in generated code. 
" + "(mksnapshot only)") // code-stubs-hydrogen.cc DEFINE_bool(profile_hydrogen_code_stub_compilation, false, @@ -664,13 +650,11 @@ DEFINE_neg_implication(predictable, parallel_sweeping) DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") -#ifdef ENABLE_DEBUGGER_SUPPORT DEFINE_bool(debugger, false, "Enable JavaScript debugger") DEFINE_bool(remote_debugger, false, "Connect JavaScript debugger to the " "debugger agent in another process") DEFINE_bool(debugger_agent, false, "Enable debugger agent") DEFINE_int(debugger_port, 5858, "Port to use for remote debugging") -#endif // ENABLE_DEBUGGER_SUPPORT DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, @@ -739,7 +723,6 @@ DEFINE_bool(print_scopes, false, "print scopes") DEFINE_bool(trace_contexts, false, "trace contexts operations") // heap.cc -DEFINE_bool(gc_greedy, false, "perform GC prior to some allocations") DEFINE_bool(gc_verbose, false, "print stuff during garbage collection") DEFINE_bool(heap_stats, false, "report heap statistics before and after GC") DEFINE_bool(code_stats, false, "report code statistics after GC") @@ -787,7 +770,6 @@ DEFINE_bool(trace_regexp_assembler, false, DEFINE_bool(log, false, "Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false, "Log all events to the log file.") -DEFINE_bool(log_runtime, false, "Activate runtime system %Log call.") DEFINE_bool(log_api, false, "Log API events to the log file.") DEFINE_bool(log_code, false, "Log code events to the log file without profiling.") diff --git a/deps/v8/src/flags.cc b/deps/v8/src/flags.cc index 8e42206c5..19a10e416 100644 --- a/deps/v8/src/flags.cc +++ b/deps/v8/src/flags.cc @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include <ctype.h> #include <stdlib.h> @@ -360,10 +337,15 @@ static Flag* FindFlag(const char* name) { } +bool FlagList::serializer_enabled_ = false; + + // static int FlagList::SetFlagsFromCommandLine(int* argc, char** argv, - bool remove_flags) { + bool remove_flags, + bool serializer_enabled) { + serializer_enabled_ = serializer_enabled; int return_code = 0; // parse arguments for (int i = 1; i < *argc;) { @@ -545,7 +527,7 @@ void FlagList::ResetAllFlags() { void FlagList::PrintHelp() { #if V8_TARGET_ARCH_ARM CpuFeatures::PrintTarget(); - CpuFeatures::Probe(); + CpuFeatures::Probe(serializer_enabled_); CpuFeatures::PrintFeatures(); #endif // V8_TARGET_ARCH_ARM diff --git a/deps/v8/src/flags.h b/deps/v8/src/flags.h index fe182e522..df786d7c3 100644 --- a/deps/v8/src/flags.h +++ b/deps/v8/src/flags.h @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_FLAGS_H_ #define V8_FLAGS_H_ @@ -63,7 +40,10 @@ class FlagList { // --flag=value (non-bool flags only, no spaces around '=') // --flag value (non-bool flags only) // -- (equivalent to --js_arguments, captures all remaining args) - static int SetFlagsFromCommandLine(int* argc, char** argv, bool remove_flags); + static int SetFlagsFromCommandLine(int* argc, + char** argv, + bool remove_flags, + bool serializer_enabled = false); // Set the flag values by parsing the string str. Splits string into argc // substrings argv[], each of which consisting of non-white-space chars, @@ -78,6 +58,10 @@ class FlagList { // Set flags as consequence of being implied by another flag. static void EnforceFlagImplications(); + + private: + // TODO(svenpanne) Remove this when Serializer/startup has been refactored. 
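A hedged sketch of a call site adapting to the flags.cc/flags.h change above (not from the patch; the wrapper function and its surrounding namespace are assumed): SetFlagsFromCommandLine gains a defaulted serializer_enabled argument, which is stored in serializer_enabled_ and later forwarded to CpuFeatures::Probe() from PrintHelp on ARM, so existing callers compile unchanged and snapshot-building tools can opt in explicitly.

    // Illustrative only: a tool that runs the serializer passes true.
    static void InitVMFlags(int* argc, char** argv) {
      FlagList::SetFlagsFromCommandLine(argc, argv,
                                        true,   // remove_flags
                                        true);  // serializer_enabled
    }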
+ static bool serializer_enabled_; }; } } // namespace v8::internal diff --git a/deps/v8/src/frames-inl.h b/deps/v8/src/frames-inl.h index aacb5664a..9b5d4dbb9 100644 --- a/deps/v8/src/frames-inl.h +++ b/deps/v8/src/frames-inl.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_FRAMES_INL_H_ #define V8_FRAMES_INL_H_ diff --git a/deps/v8/src/frames.cc b/deps/v8/src/frames.cc index 0c47de910..e7c2a149e 100644 --- a/deps/v8/src/frames.cc +++ b/deps/v8/src/frames.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -663,7 +640,7 @@ void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const { // Skip saved double registers. if (safepoint_entry.has_doubles()) { // Number of doubles not known at snapshot time. - ASSERT(!Serializer::enabled()); + ASSERT(!Serializer::enabled(isolate())); parameters_base += DoubleRegister::NumAllocatableRegisters() * kDoubleSize / kPointerSize; } @@ -806,7 +783,6 @@ void JavaScriptFrame::PrintTop(Isolate* isolate, bool print_args, bool print_line_number) { // constructor calls - HandleScope scope(isolate); DisallowHeapAllocation no_allocation; JavaScriptFrameIterator it(isolate); while (!it.done()) { @@ -827,8 +803,8 @@ void JavaScriptFrame::PrintTop(Isolate* isolate, int source_pos = code->SourcePosition(pc); Object* maybe_script = shared->script(); if (maybe_script->IsScript()) { - Handle<Script> script(Script::cast(maybe_script)); - int line = GetScriptLineNumberSafe(script, source_pos) + 1; + Script* script = Script::cast(maybe_script); + int line = script->GetLineNumber(source_pos) + 1; Object* script_name_raw = script->name(); if (script_name_raw->IsString()) { String* script_name = String::cast(script->name()); @@ -991,13 +967,10 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) { it.Next(); // Skip height. // The translation commands are ordered and the receiver is always - // at the first position. Since we are always at a call when we need - // to construct a stack trace, the receiver is always in a stack slot. + // at the first position. + // If we are at a call, the receiver is always in a stack slot. + // Otherwise we are not guaranteed to get the receiver value. opcode = static_cast<Translation::Opcode>(it.Next()); - ASSERT(opcode == Translation::STACK_SLOT || - opcode == Translation::LITERAL || - opcode == Translation::CAPTURED_OBJECT || - opcode == Translation::DUPLICATED_OBJECT); int index = it.Next(); // Get the correct receiver in the optimized frame. @@ -1021,6 +994,7 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) { : this->GetParameter(parameter_index); } } else { + // The receiver is not in a stack slot nor in a literal. We give up. // TODO(3029): Materializing a captured object (or duplicated // object) is hard, we return undefined for now. 
This breaks the // produced stack trace, as constructor frames aren't marked as @@ -1171,7 +1145,7 @@ void StackFrame::PrintIndex(StringStream* accumulator, void JavaScriptFrame::Print(StringStream* accumulator, PrintMode mode, int index) const { - HandleScope scope(isolate()); + DisallowHeapAllocation no_gc; Object* receiver = this->receiver(); JSFunction* function = this->function(); @@ -1185,13 +1159,11 @@ void JavaScriptFrame::Print(StringStream* accumulator, // doesn't contain scope info, scope_info will return 0 for the number of // parameters, stack local variables, context local variables, stack slots, // or context slots. - Handle<ScopeInfo> scope_info(ScopeInfo::Empty(isolate())); - - Handle<SharedFunctionInfo> shared(function->shared()); - scope_info = Handle<ScopeInfo>(shared->scope_info()); + SharedFunctionInfo* shared = function->shared(); + ScopeInfo* scope_info = shared->scope_info(); Object* script_obj = shared->script(); if (script_obj->IsScript()) { - Handle<Script> script(Script::cast(script_obj)); + Script* script = Script::cast(script_obj); accumulator->Add(" ["); accumulator->PrintName(script->name()); @@ -1199,11 +1171,11 @@ void JavaScriptFrame::Print(StringStream* accumulator, if (code != NULL && code->kind() == Code::FUNCTION && pc >= code->instruction_start() && pc < code->instruction_end()) { int source_pos = code->SourcePosition(pc); - int line = GetScriptLineNumberSafe(script, source_pos) + 1; + int line = script->GetLineNumber(source_pos) + 1; accumulator->Add(":%d", line); } else { int function_start_pos = shared->start_position(); - int line = GetScriptLineNumberSafe(script, function_start_pos) + 1; + int line = script->GetLineNumber(function_start_pos) + 1; accumulator->Add(":~%d", line); } @@ -1406,14 +1378,14 @@ Address StubFailureTrampolineFrame::GetCallerStackPointer() const { Code* StubFailureTrampolineFrame::unchecked_code() const { Code* trampoline; - StubFailureTrampolineStub(NOT_JS_FUNCTION_STUB_MODE). - FindCodeInCache(&trampoline, isolate()); + StubFailureTrampolineStub(isolate(), NOT_JS_FUNCTION_STUB_MODE). + FindCodeInCache(&trampoline); if (trampoline->contains(pc())) { return trampoline; } - StubFailureTrampolineStub(JS_FUNCTION_STUB_MODE). - FindCodeInCache(&trampoline, isolate()); + StubFailureTrampolineStub(isolate(), JS_FUNCTION_STUB_MODE). + FindCodeInCache(&trampoline); if (trampoline->contains(pc())) { return trampoline; } diff --git a/deps/v8/src/frames.h b/deps/v8/src/frames.h index 17f0cb35a..3dd6e9368 100644 --- a/deps/v8/src/frames.h +++ b/deps/v8/src/frames.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_FRAMES_H_ #define V8_FRAMES_H_ diff --git a/deps/v8/src/full-codegen.cc b/deps/v8/src/full-codegen.cc index fa9ecf41b..2846d2ba1 100644 --- a/deps/v8/src/full-codegen.cc +++ b/deps/v8/src/full-codegen.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -347,9 +324,7 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) { cgen.PopulateTypeFeedbackInfo(code); code->set_has_deoptimization_support(info->HasDeoptimizationSupport()); code->set_handler_table(*cgen.handler_table()); -#ifdef ENABLE_DEBUGGER_SUPPORT code->set_compiled_optimizable(info->IsOptimizable()); -#endif // ENABLE_DEBUGGER_SUPPORT code->set_allow_osr_at_loop_nesting_level(0); code->set_profiler_ticks(0); code->set_back_edge_table_offset(table_offset); @@ -386,14 +361,12 @@ unsigned FullCodeGenerator::EmitBackEdgeTable() { } -void FullCodeGenerator::InitializeFeedbackVector() { - int length = info_->function()->slot_count(); - feedback_vector_ = isolate()->factory()->NewFixedArray(length, TENURED); - Handle<Object> sentinel = TypeFeedbackInfo::UninitializedSentinel(isolate()); - // Ensure that it's safe to set without using a write barrier. - ASSERT_EQ(isolate()->heap()->uninitialized_symbol(), *sentinel); - for (int i = 0; i < length; i++) { - feedback_vector_->set(i, *sentinel, SKIP_WRITE_BARRIER); +void FullCodeGenerator::EnsureSlotContainsAllocationSite(int slot) { + Handle<FixedArray> vector = FeedbackVector(); + if (!vector->get(slot)->IsAllocationSite()) { + Handle<AllocationSite> allocation_site = + isolate()->factory()->NewAllocationSite(); + vector->set(slot, *allocation_site); } } @@ -403,8 +376,8 @@ void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) { ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty()); if (!info_->HasDeoptimizationSupport()) return; int length = bailout_entries_.length(); - Handle<DeoptimizationOutputData> data = isolate()->factory()-> - NewDeoptimizationOutputData(length, TENURED); + Handle<DeoptimizationOutputData> data = + DeoptimizationOutputData::New(isolate(), length, TENURED); for (int i = 0; i < length; i++) { data->SetAstId(i, bailout_entries_[i].id); data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state)); @@ -416,24 +389,23 @@ void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) { void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) { Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo(); info->set_ic_total_count(ic_total_count_); - info->set_feedback_vector(*FeedbackVector()); ASSERT(!isolate()->heap()->InNewSpace(*info)); code->set_type_feedback_info(*info); } void FullCodeGenerator::Initialize() { + InitializeAstVisitor(info_->zone()); // The generation of debug code must match between the snapshot code and the // code that is generated later. This is assumed by the debugger when it is // calculating PC offsets after generating a debug version of code. Therefore // we disable the production of debug code in the full compiler if we are // either generating a snapshot or we booted from a snapshot. 
generate_debug_code_ = FLAG_debug_code && - !Serializer::enabled() && + !Serializer::enabled(isolate()) && !Snapshot::HaveASnapshotToStartFrom(); masm_->set_emit_debug_code(generate_debug_code_); masm_->set_predictable_code_size(true); - InitializeAstVisitor(info_->zone()); } @@ -833,7 +805,6 @@ void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) { void FullCodeGenerator::SetStatementPosition(Statement* stmt) { -#ifdef ENABLE_DEBUGGER_SUPPORT if (!isolate()->debugger()->IsDebuggerActive()) { CodeGenerator::RecordPositions(masm_, stmt->position()); } else { @@ -852,14 +823,10 @@ void FullCodeGenerator::SetStatementPosition(Statement* stmt) { Debug::GenerateSlot(masm_); } } -#else - CodeGenerator::RecordPositions(masm_, stmt->position()); -#endif } void FullCodeGenerator::SetExpressionPosition(Expression* expr) { -#ifdef ENABLE_DEBUGGER_SUPPORT if (!isolate()->debugger()->IsDebuggerActive()) { CodeGenerator::RecordPositions(masm_, expr->position()); } else { @@ -882,9 +849,6 @@ void FullCodeGenerator::SetExpressionPosition(Expression* expr) { Debug::GenerateSlot(masm_); } } -#else - CodeGenerator::RecordPositions(masm_, expr->position()); -#endif } @@ -1506,13 +1470,11 @@ void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) { void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) { -#ifdef ENABLE_DEBUGGER_SUPPORT Comment cmnt(masm_, "[ DebuggerStatement"); SetStatementPosition(stmt); __ DebugBreak(); // Ignore the return value. -#endif } @@ -1589,8 +1551,10 @@ void FullCodeGenerator::VisitNativeFunctionLiteral( Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub()); bool is_generator = false; Handle<SharedFunctionInfo> shared = - isolate()->factory()->NewSharedFunctionInfo(name, literals, is_generator, - code, Handle<ScopeInfo>(fun->shared()->scope_info())); + isolate()->factory()->NewSharedFunctionInfo( + name, literals, is_generator, + code, Handle<ScopeInfo>(fun->shared()->scope_info()), + Handle<FixedArray>(fun->shared()->feedback_vector())); shared->set_construct_stub(*construct_stub); // Copy the function data to the shared function info. diff --git a/deps/v8/src/full-codegen.h b/deps/v8/src/full-codegen.h index 0d0a6ffed..167538ed9 100644 --- a/deps/v8/src/full-codegen.h +++ b/deps/v8/src/full-codegen.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_FULL_CODEGEN_H_ #define V8_FULL_CODEGEN_H_ @@ -122,16 +99,21 @@ class FullCodeGenerator: public AstVisitor { // Platform-specific code size multiplier. #if V8_TARGET_ARCH_IA32 - static const int kCodeSizeMultiplier = 100; + static const int kCodeSizeMultiplier = 105; + static const int kBootCodeSizeMultiplier = 100; #elif V8_TARGET_ARCH_X64 - static const int kCodeSizeMultiplier = 162; + static const int kCodeSizeMultiplier = 170; + static const int kBootCodeSizeMultiplier = 140; #elif V8_TARGET_ARCH_ARM - static const int kCodeSizeMultiplier = 142; + static const int kCodeSizeMultiplier = 149; + static const int kBootCodeSizeMultiplier = 110; #elif V8_TARGET_ARCH_ARM64 // TODO(all): Copied ARM value. Check this is sensible for ARM64. - static const int kCodeSizeMultiplier = 142; + static const int kCodeSizeMultiplier = 149; + static const int kBootCodeSizeMultiplier = 110; #elif V8_TARGET_ARCH_MIPS - static const int kCodeSizeMultiplier = 142; + static const int kCodeSizeMultiplier = 149; + static const int kBootCodeSizeMultiplier = 120; #else #error Unsupported target architecture. #endif @@ -437,12 +419,9 @@ class FullCodeGenerator: public AstVisitor { // Feedback slot support. The feedback vector will be cleared during gc and // collected by the type-feedback oracle. Handle<FixedArray> FeedbackVector() { - return feedback_vector_; + return info_->feedback_vector(); } - void StoreFeedbackVectorSlot(int slot, Handle<Object> object) { - feedback_vector_->set(slot, *object); - } - void InitializeFeedbackVector(); + void EnsureSlotContainsAllocationSite(int slot); // Record a call's return site offset, used to rebuild the frame if the // called function was inlined at the site. @@ -485,9 +464,9 @@ class FullCodeGenerator: public AstVisitor { void EmitReturnSequence(); // Platform-specific code sequences for calls - void EmitCallWithStub(Call* expr); - void EmitCallWithIC(Call* expr); - void EmitKeyedCallWithIC(Call* expr, Expression* key); + void EmitCall(Call* expr, CallIC::CallType = CallIC::FUNCTION); + void EmitCallWithLoadIC(Call* expr); + void EmitKeyedCallWithLoadIC(Call* expr, Expression* key); // Platform-specific code for inline runtime calls. InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id); @@ -844,7 +823,6 @@ class FullCodeGenerator: public AstVisitor { ZoneList<BackEdgeEntry> back_edges_; int ic_total_count_; Handle<FixedArray> handler_table_; - Handle<FixedArray> feedback_vector_; Handle<Cell> profiling_counter_; bool generate_debug_code_; diff --git a/deps/v8/src/func-name-inferrer.cc b/deps/v8/src/func-name-inferrer.cc index 441113b7d..370d6264d 100644 --- a/deps/v8/src/func-name-inferrer.cc +++ b/deps/v8/src/func-name-inferrer.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -55,14 +32,16 @@ void FuncNameInferrer::PushEnclosingName(Handle<String> name) { void FuncNameInferrer::PushLiteralName(Handle<String> name) { - if (IsOpen() && !isolate()->heap()->prototype_string()->Equals(*name)) { + if (IsOpen() && + !String::Equals(isolate()->factory()->prototype_string(), name)) { names_stack_.Add(Name(name, kLiteralName), zone()); } } void FuncNameInferrer::PushVariableName(Handle<String> name) { - if (IsOpen() && !isolate()->heap()->dot_result_string()->Equals(*name)) { + if (IsOpen() && + !String::Equals(isolate()->factory()->dot_result_string(), name)) { names_stack_.Add(Name(name, kVariableName), zone()); } } @@ -86,10 +65,9 @@ Handle<String> FuncNameInferrer::MakeNameFromStackHelper(int pos, Handle<String> name = names_stack_.at(pos).name; if (prev->length() + name->length() + 1 > String::kMaxLength) return prev; Factory* factory = isolate()->factory(); - Handle<String> curr = factory->NewConsString(factory->dot_string(), name); - CHECK_NOT_EMPTY_HANDLE(isolate(), curr); - curr = factory->NewConsString(prev, curr); - CHECK_NOT_EMPTY_HANDLE(isolate(), curr); + Handle<String> curr = + factory->NewConsString(factory->dot_string(), name).ToHandleChecked(); + curr = factory->NewConsString(prev, curr).ToHandleChecked(); return MakeNameFromStackHelper(pos + 1, curr); } else { return MakeNameFromStackHelper(pos + 1, names_stack_.at(pos).name); diff --git a/deps/v8/src/func-name-inferrer.h b/deps/v8/src/func-name-inferrer.h index 41953ffed..f0fdbd22e 100644 --- a/deps/v8/src/func-name-inferrer.h +++ b/deps/v8/src/func-name-inferrer.h @@ -1,29 +1,6 @@ // Copyright 2006-2009 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_FUNC_NAME_INFERRER_H_ #define V8_FUNC_NAME_INFERRER_H_ diff --git a/deps/v8/src/gdb-jit.cc b/deps/v8/src/gdb-jit.cc index afe5b7117..f2861c15f 100644 --- a/deps/v8/src/gdb-jit.cc +++ b/deps/v8/src/gdb-jit.cc @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifdef ENABLE_GDB_JIT_INTERFACE #include "v8.h" @@ -252,8 +229,8 @@ class MachOSection : public DebugSectionBase<MachOSectionHeader> { segment_(segment), align_(align), flags_(flags) { - ASSERT(IsPowerOf2(align)); if (align_ != 0) { + ASSERT(IsPowerOf2(align)); align_ = WhichPowerOf2(align_); } } @@ -1002,7 +979,7 @@ class CodeDescription BASE_EMBEDDED { } int GetScriptLineNumber(int pos) { - return GetScriptLineNumberSafe(script_, pos) + 1; + return script_->GetLineNumber(pos) + 1; } @@ -1841,7 +1818,7 @@ extern "C" { JITDescriptor __jit_debug_descriptor = { 1, 0, 0, 0 }; #ifdef OBJECT_PRINT - void __gdb_print_v8_object(MaybeObject* object) { + void __gdb_print_v8_object(Object* object) { object->Print(); PrintF(stdout, "\n"); } @@ -2003,8 +1980,7 @@ void GDBJITInterface::AddCode(Handle<Name> name, CompilationInfo* info) { if (!FLAG_gdbjit) return; - // Force initialization of line_ends array. - GetScriptLineNumber(script, 0); + Script::InitLineEnds(script); if (!name.is_null() && name->IsString()) { SmartArrayPointer<char> name_cstring = diff --git a/deps/v8/src/gdb-jit.h b/deps/v8/src/gdb-jit.h index bc1a8f364..a96174000 100644 --- a/deps/v8/src/gdb-jit.h +++ b/deps/v8/src/gdb-jit.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_GDB_JIT_H_ #define V8_GDB_JIT_H_ diff --git a/deps/v8/src/generator.js b/deps/v8/src/generator.js index 3c8ea6f31..c152e3a6e 100644 --- a/deps/v8/src/generator.js +++ b/deps/v8/src/generator.js @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
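In the MachOSection hunk above, the power-of-two assertion moves inside the align_ != 0 guard. The surrounding code treats zero as "no alignment requested", and zero is not a power of two under the usual IsPowerOf2 definition, so the old ordering would assert on that legitimate case. A standalone sketch of the same guard ordering in plain C++ (not V8's helpers):

    #include <cassert>

    // 0 means "no alignment requested"; only non-zero values must be powers of two.
    static unsigned AlignmentToLog2(unsigned align) {
      if (align == 0) return 0;
      assert((align & (align - 1)) == 0);  // power-of-two check, safe after the zero test
      unsigned log2 = 0;
      while ((1u << log2) < align) ++log2;
      return log2;
    }
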
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. "use strict"; diff --git a/deps/v8/src/global-handles.cc b/deps/v8/src/global-handles.cc index e06f79482..d6cd47918 100644 --- a/deps/v8/src/global-handles.cc +++ b/deps/v8/src/global-handles.cc @@ -1,29 +1,6 @@ // Copyright 2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/global-handles.h b/deps/v8/src/global-handles.h index 13fc111d8..210c0565c 100644 --- a/deps/v8/src/global-handles.h +++ b/deps/v8/src/global-handles.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_GLOBAL_HANDLES_H_ #define V8_GLOBAL_HANDLES_H_ @@ -33,7 +10,7 @@ #include "handles.h" #include "list.h" -#include "v8utils.h" +#include "utils.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/globals.h b/deps/v8/src/globals.h index db666d804..5b4be2a6d 100644 --- a/deps/v8/src/globals.h +++ b/deps/v8/src/globals.h @@ -1,35 +1,14 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_GLOBALS_H_ #define V8_GLOBALS_H_ #include "../include/v8stdint.h" +#include "base/macros.h" + // Unfortunately, the INFINITY macro cannot be used with the '-pedantic' // warning flag and certain versions of GCC due to a bug: // http://gcc.gnu.org/bugzilla/show_bug.cgi?id=11931 @@ -78,7 +57,7 @@ namespace internal { #elif defined(__ARMEL__) #define V8_HOST_ARCH_ARM 1 #define V8_HOST_ARCH_32_BIT 1 -#elif defined(__MIPSEL__) +#elif defined(__MIPSEB__) || defined(__MIPSEL__) #define V8_HOST_ARCH_MIPS 1 #define V8_HOST_ARCH_32_BIT 1 #else @@ -108,7 +87,7 @@ namespace internal { #define V8_TARGET_ARCH_ARM64 1 #elif defined(__ARMEL__) #define V8_TARGET_ARCH_ARM 1 -#elif defined(__MIPSEL__) +#elif defined(__MIPSEB__) || defined(__MIPSEL__) #define V8_TARGET_ARCH_MIPS 1 #else #error Target architecture was not detected as supported by v8 @@ -147,7 +126,7 @@ namespace internal { #endif #endif -// Determine architecture endiannes (we only support little-endian). +// Determine architecture endianness. #if V8_TARGET_ARCH_IA32 #define V8_TARGET_LITTLE_ENDIAN 1 #elif V8_TARGET_ARCH_X64 @@ -157,9 +136,13 @@ namespace internal { #elif V8_TARGET_ARCH_ARM64 #define V8_TARGET_LITTLE_ENDIAN 1 #elif V8_TARGET_ARCH_MIPS +#if defined(__MIPSEB__) +#define V8_TARGET_BIG_ENDIAN 1 +#else #define V8_TARGET_LITTLE_ENDIAN 1 +#endif #else -#error Unknown target architecture endiannes +#error Unknown target architecture endianness #endif // Determine whether the architecture uses an out-of-line constant pool. @@ -316,25 +299,6 @@ const int kUC16Size = sizeof(uc16); // NOLINT #define ROUND_UP(n, sz) (((n) + ((sz) - 1)) & ~((sz) - 1)) -// The expression OFFSET_OF(type, field) computes the byte-offset -// of the specified field relative to the containing type. This -// corresponds to 'offsetof' (in stddef.h), except that it doesn't -// use 0 or NULL, which causes a problem with the compiler warnings -// we have enabled (which is also why 'offsetof' doesn't seem to work). -// Here we simply use the non-zero value 4, which seems to work. -#define OFFSET_OF(type, field) \ - (reinterpret_cast<intptr_t>(&(reinterpret_cast<type*>(4)->field)) - 4) - - -// The expression ARRAY_SIZE(a) is a compile-time constant of type -// size_t which represents the number of elements of the given -// array. You should only use ARRAY_SIZE on statically allocated -// arrays. -#define ARRAY_SIZE(a) \ - ((sizeof(a) / sizeof(*(a))) / \ - static_cast<size_t>(!(sizeof(a) % sizeof(*(a))))) - - // The USE(x) template is used to silence C++ compiler warnings // issued for (yet) unused variables (typically parameters). template <typename T> @@ -354,52 +318,6 @@ F FUNCTION_CAST(Address addr) { } -// A macro to disallow the evil copy constructor and operator= functions -// This should be used in the private: declarations for a class -#define DISALLOW_COPY_AND_ASSIGN(TypeName) \ - TypeName(const TypeName&) V8_DELETE; \ - void operator=(const TypeName&) V8_DELETE - - -// A macro to disallow all the implicit constructors, namely the -// default constructor, copy constructor and operator= functions. -// -// This should be used in the private: declarations for a class -// that wants to prevent anyone from instantiating it. This is -// especially useful for classes containing only static methods. 
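The globals.h hunks above extend MIPS detection to big-endian builds (__MIPSEB__) and define V8_TARGET_BIG_ENDIAN alongside V8_TARGET_LITTLE_ENDIAN. A short sketch of how code can branch on these macros; the helper below is illustrative and not part of the patch.

    #include <stdint.h>

    // Reads a 32-bit value stored in target byte order; purely illustrative.
    static uint32_t ReadTargetUint32(const uint8_t* p) {
    #if V8_TARGET_BIG_ENDIAN
      return (static_cast<uint32_t>(p[0]) << 24) | (static_cast<uint32_t>(p[1]) << 16) |
             (static_cast<uint32_t>(p[2]) << 8)  |  static_cast<uint32_t>(p[3]);
    #else
      return (static_cast<uint32_t>(p[3]) << 24) | (static_cast<uint32_t>(p[2]) << 16) |
             (static_cast<uint32_t>(p[1]) << 8)  |  static_cast<uint32_t>(p[0]);
    #endif
    }
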
-#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName) \ - TypeName() V8_DELETE; \ - DISALLOW_COPY_AND_ASSIGN(TypeName) - - -// Newly written code should use V8_INLINE and V8_NOINLINE directly. -#define INLINE(declarator) V8_INLINE declarator -#define NO_INLINE(declarator) V8_NOINLINE declarator - - -// Newly written code should use V8_WARN_UNUSED_RESULT. -#define MUST_USE_RESULT V8_WARN_UNUSED_RESULT - - -// Define DISABLE_ASAN macros. -#if defined(__has_feature) -#if __has_feature(address_sanitizer) -#define DISABLE_ASAN __attribute__((no_sanitize_address)) -#endif -#endif - - -#ifndef DISABLE_ASAN -#define DISABLE_ASAN -#endif - -#if V8_CC_GNU -#define V8_IMMEDIATE_CRASH() __builtin_trap() -#else -#define V8_IMMEDIATE_CRASH() ((void(*)())0)() -#endif - - // ----------------------------------------------------------------------------- // Forward declarations for frequently used classes // (sorted alphabetically) diff --git a/deps/v8/src/handles-inl.h b/deps/v8/src/handles-inl.h index a25b4a226..d9f8c69c1 100644 --- a/deps/v8/src/handles-inl.h +++ b/deps/v8/src/handles-inl.h @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
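The same globals.h change deletes OFFSET_OF, ARRAY_SIZE, DISALLOW_COPY_AND_ASSIGN, DISALLOW_IMPLICIT_CONSTRUCTORS and related helpers while adding an include of "base/macros.h" near the top of the file, so these macros presumably now come from that header; the patch does not show it. A usage sketch under that assumption:

    // Assumes DISALLOW_COPY_AND_ASSIGN now lives in base/macros.h; the class is
    // illustrative.
    #include "base/macros.h"

    class ScratchBuffer {
     public:
      ScratchBuffer() : size_(0) {}

     private:
      int size_;
      DISALLOW_COPY_AND_ASSIGN(ScratchBuffer);  // no copy construction or assignment
    };
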
// #ifndef V8_HANDLES_INL_H_ @@ -39,40 +16,36 @@ namespace internal { template<typename T> Handle<T>::Handle(T* obj) { - ASSERT(!obj->IsFailure()); location_ = HandleScope::CreateHandle(obj->GetIsolate(), obj); } template<typename T> Handle<T>::Handle(T* obj, Isolate* isolate) { - ASSERT(!obj->IsFailure()); location_ = HandleScope::CreateHandle(isolate, obj); } template <typename T> -inline bool Handle<T>::is_identical_to(const Handle<T> other) const { - ASSERT(location_ == NULL || !(*location_)->IsFailure()); - if (location_ == other.location_) return true; - if (location_ == NULL || other.location_ == NULL) return false; +inline bool Handle<T>::is_identical_to(const Handle<T> o) const { // Dereferencing deferred handles to check object equality is safe. - SLOW_ASSERT(IsDereferenceAllowed(NO_DEFERRED_CHECK) && - other.IsDereferenceAllowed(NO_DEFERRED_CHECK)); - return *location_ == *other.location_; + SLOW_ASSERT( + (location_ == NULL || IsDereferenceAllowed(NO_DEFERRED_CHECK)) && + (o.location_ == NULL || o.IsDereferenceAllowed(NO_DEFERRED_CHECK))); + if (location_ == o.location_) return true; + if (location_ == NULL || o.location_ == NULL) return false; + return *location_ == *o.location_; } template <typename T> inline T* Handle<T>::operator*() const { - ASSERT(location_ != NULL && !(*location_)->IsFailure()); SLOW_ASSERT(IsDereferenceAllowed(INCLUDE_DEFERRED_CHECK)); return *BitCast<T**>(location_); } template <typename T> inline T** Handle<T>::location() const { - ASSERT(location_ == NULL || !(*location_)->IsFailure()); SLOW_ASSERT(location_ == NULL || IsDereferenceAllowed(INCLUDE_DEFERRED_CHECK)); return location_; diff --git a/deps/v8/src/handles.cc b/deps/v8/src/handles.cc index 398a68265..89e034e9d 100644 --- a/deps/v8/src/handles.cc +++ b/deps/v8/src/handles.cc @@ -1,45 +1,10 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" -#include "accessors.h" -#include "api.h" -#include "arguments.h" -#include "bootstrapper.h" -#include "compiler.h" -#include "debug.h" -#include "execution.h" -#include "global-handles.h" -#include "natives.h" -#include "runtime.h" -#include "string-search.h" -#include "stub-cache.h" -#include "vm-state-inl.h" +#include "handles.h" namespace v8 { namespace internal { @@ -124,605 +89,6 @@ Address HandleScope::current_limit_address(Isolate* isolate) { } -Handle<FixedArray> AddKeysFromJSArray(Handle<FixedArray> content, - Handle<JSArray> array) { - CALL_HEAP_FUNCTION(content->GetIsolate(), - content->AddKeysFromJSArray(*array), FixedArray); -} - - -Handle<FixedArray> UnionOfKeys(Handle<FixedArray> first, - Handle<FixedArray> second) { - CALL_HEAP_FUNCTION(first->GetIsolate(), - first->UnionOfKeys(*second), FixedArray); -} - - -Handle<JSGlobalProxy> ReinitializeJSGlobalProxy( - Handle<JSFunction> constructor, - Handle<JSGlobalProxy> global) { - CALL_HEAP_FUNCTION( - constructor->GetIsolate(), - constructor->GetHeap()->ReinitializeJSGlobalProxy(*constructor, *global), - JSGlobalProxy); -} - - -void FlattenString(Handle<String> string) { - CALL_HEAP_FUNCTION_VOID(string->GetIsolate(), string->TryFlatten()); -} - - -Handle<String> FlattenGetString(Handle<String> string) { - CALL_HEAP_FUNCTION(string->GetIsolate(), string->TryFlatten(), String); -} - - -Handle<Object> ForceSetProperty(Handle<JSObject> object, - Handle<Object> key, - Handle<Object> value, - PropertyAttributes attributes) { - return Runtime::ForceSetObjectProperty(object->GetIsolate(), object, key, - value, attributes); -} - - -Handle<Object> DeleteProperty(Handle<JSObject> object, Handle<Object> key) { - Isolate* isolate = object->GetIsolate(); - CALL_HEAP_FUNCTION(isolate, - Runtime::DeleteObjectProperty( - isolate, object, key, JSReceiver::NORMAL_DELETION), - Object); -} - - -Handle<Object> ForceDeleteProperty(Handle<JSObject> object, - Handle<Object> key) { - Isolate* isolate = object->GetIsolate(); - CALL_HEAP_FUNCTION(isolate, - Runtime::DeleteObjectProperty( - isolate, object, key, JSReceiver::FORCE_DELETION), - Object); -} - - -Handle<Object> HasProperty(Handle<JSReceiver> obj, Handle<Object> key) { - Isolate* isolate = obj->GetIsolate(); - CALL_HEAP_FUNCTION(isolate, - Runtime::HasObjectProperty(isolate, obj, key), Object); -} - - -Handle<Object> GetProperty(Handle<JSReceiver> obj, - const char* name) { - Isolate* isolate = obj->GetIsolate(); - Handle<String> str = isolate->factory()->InternalizeUtf8String(name); - CALL_HEAP_FUNCTION(isolate, obj->GetProperty(*str), Object); -} - - -Handle<Object> GetProperty(Isolate* isolate, - Handle<Object> obj, - Handle<Object> key) { - CALL_HEAP_FUNCTION(isolate, - Runtime::GetObjectProperty(isolate, obj, key), Object); -} - - -Handle<String> LookupSingleCharacterStringFromCode(Isolate* isolate, - uint32_t index) { - CALL_HEAP_FUNCTION( - isolate, - isolate->heap()->LookupSingleCharacterStringFromCode(index), - String); -} - - -// Wrappers for scripts are kept alive and cached in weak global -// handles referred from foreign objects held by the scripts as long as -// they are used. When they are not used anymore, the garbage -// collector will call the weak callback on the global handle -// associated with the wrapper and get rid of both the wrapper and the -// handle. 
-static void ClearWrapperCache( - const v8::WeakCallbackData<v8::Value, void>& data) { - Object** location = reinterpret_cast<Object**>(data.GetParameter()); - JSValue* wrapper = JSValue::cast(*location); - Foreign* foreign = Script::cast(wrapper->value())->wrapper(); - ASSERT_EQ(foreign->foreign_address(), reinterpret_cast<Address>(location)); - foreign->set_foreign_address(0); - GlobalHandles::Destroy(location); - Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate()); - isolate->counters()->script_wrappers()->Decrement(); -} - - -Handle<JSValue> GetScriptWrapper(Handle<Script> script) { - if (script->wrapper()->foreign_address() != NULL) { - // Return a handle for the existing script wrapper from the cache. - return Handle<JSValue>( - *reinterpret_cast<JSValue**>(script->wrapper()->foreign_address())); - } - Isolate* isolate = script->GetIsolate(); - // Construct a new script wrapper. - isolate->counters()->script_wrappers()->Increment(); - Handle<JSFunction> constructor = isolate->script_function(); - Handle<JSValue> result = - Handle<JSValue>::cast(isolate->factory()->NewJSObject(constructor)); - - // The allocation might have triggered a GC, which could have called this - // function recursively, and a wrapper has already been created and cached. - // In that case, simply return a handle for the cached wrapper. - if (script->wrapper()->foreign_address() != NULL) { - return Handle<JSValue>( - *reinterpret_cast<JSValue**>(script->wrapper()->foreign_address())); - } - - result->set_value(*script); - - // Create a new weak global handle and use it to cache the wrapper - // for future use. The cache will automatically be cleared by the - // garbage collector when it is not used anymore. - Handle<Object> handle = isolate->global_handles()->Create(*result); - GlobalHandles::MakeWeak(handle.location(), - reinterpret_cast<void*>(handle.location()), - &ClearWrapperCache); - script->wrapper()->set_foreign_address( - reinterpret_cast<Address>(handle.location())); - return result; -} - - -// Init line_ends array with code positions of line ends inside script -// source. -void InitScriptLineEnds(Handle<Script> script) { - if (!script->line_ends()->IsUndefined()) return; - - Isolate* isolate = script->GetIsolate(); - - if (!script->source()->IsString()) { - ASSERT(script->source()->IsUndefined()); - Handle<FixedArray> empty = isolate->factory()->NewFixedArray(0); - script->set_line_ends(*empty); - ASSERT(script->line_ends()->IsFixedArray()); - return; - } - - Handle<String> src(String::cast(script->source()), isolate); - - Handle<FixedArray> array = CalculateLineEnds(src, true); - - if (*array != isolate->heap()->empty_fixed_array()) { - array->set_map(isolate->heap()->fixed_cow_array_map()); - } - - script->set_line_ends(*array); - ASSERT(script->line_ends()->IsFixedArray()); -} - - -template <typename SourceChar> -static void CalculateLineEnds(Isolate* isolate, - List<int>* line_ends, - Vector<const SourceChar> src, - bool with_last_line) { - const int src_len = src.length(); - StringSearch<uint8_t, SourceChar> search(isolate, STATIC_ASCII_VECTOR("\n")); - - // Find and record line ends. - int position = 0; - while (position != -1 && position < src_len) { - position = search.Search(src, position); - if (position != -1) { - line_ends->Add(position); - position++; - } else if (with_last_line) { - // Even if the last line misses a line end, it is counted. 
- line_ends->Add(src_len); - return; - } - } -} - - -Handle<FixedArray> CalculateLineEnds(Handle<String> src, - bool with_last_line) { - src = FlattenGetString(src); - // Rough estimate of line count based on a roughly estimated average - // length of (unpacked) code. - int line_count_estimate = src->length() >> 4; - List<int> line_ends(line_count_estimate); - Isolate* isolate = src->GetIsolate(); - { - DisallowHeapAllocation no_allocation; // ensure vectors stay valid. - // Dispatch on type of strings. - String::FlatContent content = src->GetFlatContent(); - ASSERT(content.IsFlat()); - if (content.IsAscii()) { - CalculateLineEnds(isolate, - &line_ends, - content.ToOneByteVector(), - with_last_line); - } else { - CalculateLineEnds(isolate, - &line_ends, - content.ToUC16Vector(), - with_last_line); - } - } - int line_count = line_ends.length(); - Handle<FixedArray> array = isolate->factory()->NewFixedArray(line_count); - for (int i = 0; i < line_count; i++) { - array->set(i, Smi::FromInt(line_ends[i])); - } - return array; -} - - -// Convert code position into line number. -int GetScriptLineNumber(Handle<Script> script, int code_pos) { - InitScriptLineEnds(script); - DisallowHeapAllocation no_allocation; - FixedArray* line_ends_array = FixedArray::cast(script->line_ends()); - const int line_ends_len = line_ends_array->length(); - - if (!line_ends_len) return -1; - - if ((Smi::cast(line_ends_array->get(0)))->value() >= code_pos) { - return script->line_offset()->value(); - } - - int left = 0; - int right = line_ends_len; - while (int half = (right - left) / 2) { - if ((Smi::cast(line_ends_array->get(left + half)))->value() > code_pos) { - right -= half; - } else { - left += half; - } - } - return right + script->line_offset()->value(); -} - - -// Convert code position into column number. -int GetScriptColumnNumber(Handle<Script> script, int code_pos) { - int line_number = GetScriptLineNumber(script, code_pos); - if (line_number == -1) return -1; - - DisallowHeapAllocation no_allocation; - FixedArray* line_ends_array = FixedArray::cast(script->line_ends()); - line_number = line_number - script->line_offset()->value(); - if (line_number == 0) return code_pos + script->column_offset()->value(); - int prev_line_end_pos = - Smi::cast(line_ends_array->get(line_number - 1))->value(); - return code_pos - (prev_line_end_pos + 1); -} - - -int GetScriptLineNumberSafe(Handle<Script> script, int code_pos) { - DisallowHeapAllocation no_allocation; - if (!script->line_ends()->IsUndefined()) { - return GetScriptLineNumber(script, code_pos); - } - // Slow mode: we do not have line_ends. We have to iterate through source. - if (!script->source()->IsString()) { - return -1; - } - String* source = String::cast(script->source()); - int line = 0; - int len = source->length(); - for (int pos = 0; pos < len; pos++) { - if (pos == code_pos) { - break; - } - if (source->Get(pos) == '\n') { - line++; - } - } - return line; -} - - -// Compute the property keys from the interceptor. -// TODO(rossberg): support symbols in API, and filter here if needed. 
-v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSReceiver> receiver, - Handle<JSObject> object) { - Isolate* isolate = receiver->GetIsolate(); - Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor()); - PropertyCallbackArguments - args(isolate, interceptor->data(), *receiver, *object); - v8::Handle<v8::Array> result; - if (!interceptor->enumerator()->IsUndefined()) { - v8::NamedPropertyEnumeratorCallback enum_fun = - v8::ToCData<v8::NamedPropertyEnumeratorCallback>( - interceptor->enumerator()); - LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object)); - result = args.Call(enum_fun); - } -#if ENABLE_EXTRA_CHECKS - CHECK(result.IsEmpty() || v8::Utils::OpenHandle(*result)->IsJSObject()); -#endif - return v8::Local<v8::Array>::New(reinterpret_cast<v8::Isolate*>(isolate), - result); -} - - -// Compute the element keys from the interceptor. -v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSReceiver> receiver, - Handle<JSObject> object) { - Isolate* isolate = receiver->GetIsolate(); - Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor()); - PropertyCallbackArguments - args(isolate, interceptor->data(), *receiver, *object); - v8::Handle<v8::Array> result; - if (!interceptor->enumerator()->IsUndefined()) { - v8::IndexedPropertyEnumeratorCallback enum_fun = - v8::ToCData<v8::IndexedPropertyEnumeratorCallback>( - interceptor->enumerator()); - LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object)); - result = args.Call(enum_fun); -#if ENABLE_EXTRA_CHECKS - CHECK(result.IsEmpty() || v8::Utils::OpenHandle(*result)->IsJSObject()); -#endif - } - return v8::Local<v8::Array>::New(reinterpret_cast<v8::Isolate*>(isolate), - result); -} - - -Handle<Object> GetScriptNameOrSourceURL(Handle<Script> script) { - Isolate* isolate = script->GetIsolate(); - Handle<String> name_or_source_url_key = - isolate->factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("nameOrSourceURL")); - Handle<JSValue> script_wrapper = GetScriptWrapper(script); - Handle<Object> property = GetProperty(isolate, - script_wrapper, - name_or_source_url_key); - ASSERT(property->IsJSFunction()); - Handle<JSFunction> method = Handle<JSFunction>::cast(property); - bool caught_exception; - Handle<Object> result = Execution::TryCall(method, script_wrapper, 0, - NULL, &caught_exception); - if (caught_exception) { - result = isolate->factory()->undefined_value(); - } - return result; -} - - -static bool ContainsOnlyValidKeys(Handle<FixedArray> array) { - int len = array->length(); - for (int i = 0; i < len; i++) { - Object* e = array->get(i); - if (!(e->IsString() || e->IsNumber())) return false; - } - return true; -} - - -Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSReceiver> object, - KeyCollectionType type, - bool* threw) { - USE(ContainsOnlyValidKeys); - Isolate* isolate = object->GetIsolate(); - Handle<FixedArray> content = isolate->factory()->empty_fixed_array(); - Handle<JSObject> arguments_boilerplate = Handle<JSObject>( - isolate->context()->native_context()->sloppy_arguments_boilerplate(), - isolate); - Handle<JSFunction> arguments_function = Handle<JSFunction>( - JSFunction::cast(arguments_boilerplate->map()->constructor()), - isolate); - - // Only collect keys if access is permitted. 
- for (Handle<Object> p = object; - *p != isolate->heap()->null_value(); - p = Handle<Object>(p->GetPrototype(isolate), isolate)) { - if (p->IsJSProxy()) { - Handle<JSProxy> proxy(JSProxy::cast(*p), isolate); - Handle<Object> args[] = { proxy }; - Handle<Object> names = Execution::Call(isolate, - isolate->proxy_enumerate(), - object, - ARRAY_SIZE(args), - args, - threw); - if (*threw) return content; - content = AddKeysFromJSArray(content, Handle<JSArray>::cast(names)); - break; - } - - Handle<JSObject> current(JSObject::cast(*p), isolate); - - // Check access rights if required. - if (current->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(current, - isolate->factory()->undefined_value(), - v8::ACCESS_KEYS)) { - isolate->ReportFailedAccessCheckWrapper(current, v8::ACCESS_KEYS); - if (isolate->has_scheduled_exception()) { - isolate->PromoteScheduledException(); - *threw = true; - } - break; - } - - // Compute the element keys. - Handle<FixedArray> element_keys = - isolate->factory()->NewFixedArray(current->NumberOfEnumElements()); - current->GetEnumElementKeys(*element_keys); - content = UnionOfKeys(content, element_keys); - ASSERT(ContainsOnlyValidKeys(content)); - - // Add the element keys from the interceptor. - if (current->HasIndexedInterceptor()) { - v8::Handle<v8::Array> result = - GetKeysForIndexedInterceptor(object, current); - if (!result.IsEmpty()) - content = AddKeysFromJSArray(content, v8::Utils::OpenHandle(*result)); - ASSERT(ContainsOnlyValidKeys(content)); - } - - // We can cache the computed property keys if access checks are - // not needed and no interceptors are involved. - // - // We do not use the cache if the object has elements and - // therefore it does not make sense to cache the property names - // for arguments objects. Arguments objects will always have - // elements. - // Wrapped strings have elements, but don't have an elements - // array or dictionary. So the fast inline test for whether to - // use the cache says yes, so we should not create a cache. - bool cache_enum_keys = - ((current->map()->constructor() != *arguments_function) && - !current->IsJSValue() && - !current->IsAccessCheckNeeded() && - !current->HasNamedInterceptor() && - !current->HasIndexedInterceptor()); - // Compute the property keys and cache them if possible. - content = - UnionOfKeys(content, GetEnumPropertyKeys(current, cache_enum_keys)); - ASSERT(ContainsOnlyValidKeys(content)); - - // Add the property keys from the interceptor. - if (current->HasNamedInterceptor()) { - v8::Handle<v8::Array> result = - GetKeysForNamedInterceptor(object, current); - if (!result.IsEmpty()) - content = AddKeysFromJSArray(content, v8::Utils::OpenHandle(*result)); - ASSERT(ContainsOnlyValidKeys(content)); - } - - // If we only want local properties we bail out after the first - // iteration. 
- if (type == LOCAL_ONLY) - break; - } - return content; -} - - -Handle<JSArray> GetKeysFor(Handle<JSReceiver> object, bool* threw) { - Isolate* isolate = object->GetIsolate(); - isolate->counters()->for_in()->Increment(); - Handle<FixedArray> elements = - GetKeysInFixedArrayFor(object, INCLUDE_PROTOS, threw); - return isolate->factory()->NewJSArrayWithElements(elements); -} - - -Handle<FixedArray> ReduceFixedArrayTo(Handle<FixedArray> array, int length) { - ASSERT(array->length() >= length); - if (array->length() == length) return array; - - Handle<FixedArray> new_array = - array->GetIsolate()->factory()->NewFixedArray(length); - for (int i = 0; i < length; ++i) new_array->set(i, array->get(i)); - return new_array; -} - - -Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object, - bool cache_result) { - Isolate* isolate = object->GetIsolate(); - if (object->HasFastProperties()) { - if (object->map()->instance_descriptors()->HasEnumCache()) { - int own_property_count = object->map()->EnumLength(); - // If we have an enum cache, but the enum length of the given map is set - // to kInvalidEnumCache, this means that the map itself has never used the - // present enum cache. The first step to using the cache is to set the - // enum length of the map by counting the number of own descriptors that - // are not DONT_ENUM or SYMBOLIC. - if (own_property_count == kInvalidEnumCacheSentinel) { - own_property_count = object->map()->NumberOfDescribedProperties( - OWN_DESCRIPTORS, DONT_SHOW); - - if (cache_result) object->map()->SetEnumLength(own_property_count); - } - - DescriptorArray* desc = object->map()->instance_descriptors(); - Handle<FixedArray> keys(desc->GetEnumCache(), isolate); - - // In case the number of properties required in the enum are actually - // present, we can reuse the enum cache. Otherwise, this means that the - // enum cache was generated for a previous (smaller) version of the - // Descriptor Array. In that case we regenerate the enum cache. 
- if (own_property_count <= keys->length()) { - isolate->counters()->enum_cache_hits()->Increment(); - return ReduceFixedArrayTo(keys, own_property_count); - } - } - - Handle<Map> map(object->map()); - - if (map->instance_descriptors()->IsEmpty()) { - isolate->counters()->enum_cache_hits()->Increment(); - if (cache_result) map->SetEnumLength(0); - return isolate->factory()->empty_fixed_array(); - } - - isolate->counters()->enum_cache_misses()->Increment(); - int num_enum = map->NumberOfDescribedProperties(ALL_DESCRIPTORS, DONT_SHOW); - - Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum); - Handle<FixedArray> indices = isolate->factory()->NewFixedArray(num_enum); - - Handle<DescriptorArray> descs = - Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate); - - int real_size = map->NumberOfOwnDescriptors(); - int enum_size = 0; - int index = 0; - - for (int i = 0; i < descs->number_of_descriptors(); i++) { - PropertyDetails details = descs->GetDetails(i); - Object* key = descs->GetKey(i); - if (!(details.IsDontEnum() || key->IsSymbol())) { - if (i < real_size) ++enum_size; - storage->set(index, key); - if (!indices.is_null()) { - if (details.type() != FIELD) { - indices = Handle<FixedArray>(); - } else { - int field_index = descs->GetFieldIndex(i); - if (field_index >= map->inobject_properties()) { - field_index = -(field_index - map->inobject_properties() + 1); - } - indices->set(index, Smi::FromInt(field_index)); - } - } - index++; - } - } - ASSERT(index == storage->length()); - - Handle<FixedArray> bridge_storage = - isolate->factory()->NewFixedArray( - DescriptorArray::kEnumCacheBridgeLength); - DescriptorArray* desc = object->map()->instance_descriptors(); - desc->SetEnumCache(*bridge_storage, - *storage, - indices.is_null() ? Object::cast(Smi::FromInt(0)) - : Object::cast(*indices)); - if (cache_result) { - object->map()->SetEnumLength(enum_size); - } - - return ReduceFixedArrayTo(storage, enum_size); - } else { - Handle<NameDictionary> dictionary(object->property_dictionary()); - int length = dictionary->NumberOfEnumElements(); - if (length == 0) { - return Handle<FixedArray>(isolate->heap()->empty_fixed_array()); - } - Handle<FixedArray> storage = isolate->factory()->NewFixedArray(length); - dictionary->CopyEnumKeysTo(*storage); - return storage; - } -} - - DeferredHandleScope::DeferredHandleScope(Isolate* isolate) : impl_(isolate->handle_scope_implementer()) { impl_->BeginDeferredScope(); @@ -761,16 +127,4 @@ DeferredHandles* DeferredHandleScope::Detach() { return deferred; } - -void AddWeakObjectToCodeDependency(Heap* heap, - Handle<Object> object, - Handle<Code> code) { - heap->EnsureWeakObjectToCodeTable(); - Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(*object)); - dep = DependentCode::Insert(dep, DependentCode::kWeaklyEmbeddedGroup, code); - CALL_HEAP_FUNCTION_VOID(heap->isolate(), - heap->AddWeakObjectToCodeDependency(*object, *dep)); -} - - } } // namespace v8::internal diff --git a/deps/v8/src/handles.h b/deps/v8/src/handles.h index 853865804..5dc4a5ddd 100644 --- a/deps/v8/src/handles.h +++ b/deps/v8/src/handles.h @@ -1,39 +1,78 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HANDLES_H_ #define V8_HANDLES_H_ -#include "allocation.h" #include "objects.h" namespace v8 { namespace internal { +// A Handle can be converted into a MaybeHandle. Converting a MaybeHandle +// into a Handle requires checking that it does not point to NULL. This +// ensures NULL checks before use. +// Do not use MaybeHandle as argument type. + +template<typename T> +class MaybeHandle { + public: + INLINE(MaybeHandle()) : location_(NULL) { } + + // Constructor for handling automatic up casting from Handle. + // Ex. Handle<JSArray> can be passed when MaybeHandle<Object> is expected. + template <class S> MaybeHandle(Handle<S> handle) { +#ifdef DEBUG + T* a = NULL; + S* b = NULL; + a = b; // Fake assignment to enforce type checks. + USE(a); +#endif + this->location_ = reinterpret_cast<T**>(handle.location()); + } + + // Constructor for handling automatic up casting. + // Ex. MaybeHandle<JSArray> can be passed when Handle<Object> is expected. + template <class S> MaybeHandle(MaybeHandle<S> maybe_handle) { +#ifdef DEBUG + T* a = NULL; + S* b = NULL; + a = b; // Fake assignment to enforce type checks. + USE(a); +#endif + location_ = reinterpret_cast<T**>(maybe_handle.location_); + } + + INLINE(void Assert()) { ASSERT(location_ != NULL); } + INLINE(void Check()) { CHECK(location_ != NULL); } + + INLINE(Handle<T> ToHandleChecked()) { + Check(); + return Handle<T>(location_); + } + + // Convert to a Handle with a type that can be upcasted to. + template <class S> INLINE(bool ToHandle(Handle<S>* out)) { + if (location_ == NULL) { + *out = Handle<T>::null(); + return false; + } else { + *out = Handle<T>(location_); + return true; + } + } + + bool is_null() const { return location_ == NULL; } + + protected: + T** location_; + + // MaybeHandles of different classes are allowed to access each + // other's location_. + template<class S> friend class MaybeHandle; +}; + // ---------------------------------------------------------------------------- // A Handle provides a reference to an object that survives relocation by // the garbage collector. 
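The MaybeHandle<T> template introduced above makes the "possibly empty" state explicit: a Handle converts into a MaybeHandle implicitly, but getting a Handle back requires either ToHandle(), which reports emptiness, or ToHandleChecked(), which CHECKs. A sketch of consuming it, using only the members declared above; the surrounding function is illustrative.

    // Illustrative consumer; only MaybeHandle members shown in the patch are used.
    MaybeHandle<Object> UseName(MaybeHandle<String> maybe_name) {
      Handle<String> name;
      if (!maybe_name.ToHandle(&name)) {
        return MaybeHandle<Object>();  // stay empty; the caller must check as well
      }
      // ... work with |name| here; it is guaranteed non-empty ...
      return name;  // Handle<String> upcasts to MaybeHandle<Object> automatically
    }
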
@@ -47,7 +86,9 @@ class Handle { INLINE(explicit Handle(T* obj)); INLINE(Handle(T* obj, Isolate* isolate)); - INLINE(Handle()) : location_(NULL) {} + // TODO(yangguo): Values that contain empty handles should be declared as + // MaybeHandle to force validation before being used as handles. + INLINE(Handle()) : location_(NULL) { } // Constructor for handling automatic up casting. // Ex. Handle<JSFunction> can be passed when Handle<Object> is expected. @@ -77,6 +118,8 @@ class Handle { return Handle<T>(reinterpret_cast<T**>(that.location_)); } + // TODO(yangguo): Values that contain empty handles should be declared as + // MaybeHandle to force validation before being used as handles. static Handle<T> null() { return Handle<T>(); } bool is_null() const { return location_ == NULL; } @@ -112,6 +155,13 @@ inline Handle<T> handle(T* t) { } +// Key comparison function for Map handles. +inline bool operator<(const Handle<Map>& lhs, const Handle<Map>& rhs) { + // This is safe because maps don't move. + return *lhs < *rhs; +} + + class DeferredHandles; class HandleScopeImplementer; @@ -214,91 +264,6 @@ class DeferredHandleScope { }; -// ---------------------------------------------------------------------------- -// Handle operations. -// They might invoke garbage collection. The result is an handle to -// an object of expected type, or the handle is an error if running out -// of space or encountering an internal error. - -// Flattens a string. -void FlattenString(Handle<String> str); - -// Flattens a string and returns the underlying external or sequential -// string. -Handle<String> FlattenGetString(Handle<String> str); - -Handle<Object> ForceSetProperty(Handle<JSObject> object, - Handle<Object> key, - Handle<Object> value, - PropertyAttributes attributes); - -Handle<Object> DeleteProperty(Handle<JSObject> object, Handle<Object> key); - -Handle<Object> ForceDeleteProperty(Handle<JSObject> object, Handle<Object> key); - -Handle<Object> HasProperty(Handle<JSReceiver> obj, Handle<Object> key); - -Handle<Object> GetProperty(Handle<JSReceiver> obj, const char* name); - -Handle<Object> GetProperty(Isolate* isolate, - Handle<Object> obj, - Handle<Object> key); - -Handle<String> LookupSingleCharacterStringFromCode(Isolate* isolate, - uint32_t index); - -Handle<FixedArray> AddKeysFromJSArray(Handle<FixedArray>, - Handle<JSArray> array); - -// Get the JS object corresponding to the given script; create it -// if none exists. -Handle<JSValue> GetScriptWrapper(Handle<Script> script); - -// Script line number computations. Note that the line number is zero-based. -void InitScriptLineEnds(Handle<Script> script); -// For string calculates an array of line end positions. If the string -// does not end with a new line character, this character may optionally be -// imagined. -Handle<FixedArray> CalculateLineEnds(Handle<String> string, - bool with_imaginary_last_new_line); -int GetScriptLineNumber(Handle<Script> script, int code_position); -// The safe version does not make heap allocations but may work much slower. -int GetScriptLineNumberSafe(Handle<Script> script, int code_position); -int GetScriptColumnNumber(Handle<Script> script, int code_position); -Handle<Object> GetScriptNameOrSourceURL(Handle<Script> script); - -// Computes the enumerable keys from interceptors. Used for debug mirrors and -// by GetKeysInFixedArrayFor below. 
-v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSReceiver> receiver, - Handle<JSObject> object); -v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSReceiver> receiver, - Handle<JSObject> object); - -enum KeyCollectionType { LOCAL_ONLY, INCLUDE_PROTOS }; - -// Computes the enumerable keys for a JSObject. Used for implementing -// "for (n in object) { }". -Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSReceiver> object, - KeyCollectionType type, - bool* threw); -Handle<JSArray> GetKeysFor(Handle<JSReceiver> object, bool* threw); -Handle<FixedArray> ReduceFixedArrayTo(Handle<FixedArray> array, int length); -Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object, - bool cache_result); - -// Computes the union of keys and return the result. -// Used for implementing "for (n in object) { }" -Handle<FixedArray> UnionOfKeys(Handle<FixedArray> first, - Handle<FixedArray> second); - -Handle<JSGlobalProxy> ReinitializeJSGlobalProxy( - Handle<JSFunction> constructor, - Handle<JSGlobalProxy> global); - -void AddWeakObjectToCodeDependency(Heap* heap, - Handle<Object> object, - Handle<Code> code); - // Seal off the current HandleScope so that new handles can only be created // if a new HandleScope is entered. class SealHandleScope BASE_EMBEDDED { diff --git a/deps/v8/src/harmony-array.js b/deps/v8/src/harmony-array.js index d37d87538..dbcb292a0 100644 --- a/deps/v8/src/harmony-array.js +++ b/deps/v8/src/harmony-array.js @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
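The handles.h hunk above also defines operator< for Handle<Map>, comparing the raw Map pointers and relying on maps not moving. That is what lets map handles key ordered containers; the small sketch below uses std::map purely for illustration, and keeping such a container alive across handle scopes is not addressed.

    #include <map>

    // Illustrative only: an ordered container keyed by Handle<Map> via the new
    // operator<. The lifetime of the handles relative to their HandleScope is
    // the caller's problem and is not shown here.
    static void CountMap(std::map<Handle<Map>, int>* counts, Handle<Map> map) {
      ++(*counts)[map];
    }
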
'use strict'; @@ -103,6 +80,49 @@ function ArrayFindIndex(predicate /* thisArg */) { // length == 1 } +// ES6, draft 04-05-14, section 22.1.3.6 +function ArrayFill(value /* [, start [, end ] ] */) { // length == 1 + CHECK_OBJECT_COERCIBLE(this, "Array.prototype.fill"); + + var array = ToObject(this); + var length = TO_UINT32(array.length); + + var i = 0; + var end = length; + + if (%_ArgumentsLength() > 1) { + i = %_Arguments(1); + i = IS_UNDEFINED(i) ? 0 : TO_INTEGER(i); + if (%_ArgumentsLength() > 2) { + end = %_Arguments(2); + end = IS_UNDEFINED(end) ? length : TO_INTEGER(end); + } + } + + if (i < 0) { + i += length; + if (i < 0) i = 0; + } else { + if (i > length) i = length; + } + + if (end < 0) { + end += length; + if (end < 0) end = 0; + } else { + if (end > length) end = length; + } + + if ((end - i) > 0 && ObjectIsFrozen(array)) { + throw MakeTypeError("array_functions_on_frozen", + ["Array.prototype.fill"]); + } + + for (; i < end; i++) + array[i] = value; + return array; +} + // ------------------------------------------------------------------- function HarmonyArrayExtendArrayPrototype() { @@ -111,7 +131,8 @@ function HarmonyArrayExtendArrayPrototype() { // Set up the non-enumerable functions on the Array prototype object. InstallFunctions($Array.prototype, DONT_ENUM, $Array( "find", ArrayFind, - "findIndex", ArrayFindIndex + "findIndex", ArrayFindIndex, + "fill", ArrayFill )); } diff --git a/deps/v8/src/harmony-math.js b/deps/v8/src/harmony-math.js index 298fa58cb..505e9a163 100644 --- a/deps/v8/src/harmony-math.js +++ b/deps/v8/src/harmony-math.js @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 'use strict'; @@ -156,7 +133,7 @@ function MathHypot(x, y) { // Function length is 2. // ES6 draft 09-27-13, section 20.2.2.16. 
function MathFround(x) { - return %Math_fround(TO_NUMBER_INLINE(x)); + return %MathFround(TO_NUMBER_INLINE(x)); } diff --git a/deps/v8/src/harmony-string.js b/deps/v8/src/harmony-string.js index cc3c5cf93..4cd8e6687 100644 --- a/deps/v8/src/harmony-string.js +++ b/deps/v8/src/harmony-string.js @@ -1,29 +1,6 @@ // Copyright 2014 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 'use strict'; @@ -53,7 +30,7 @@ function StringRepeat(count) { } -// ES6 draft 01-20-14, section 21.1.3.18 +// ES6 draft 04-05-14, section 21.1.3.18 function StringStartsWith(searchString /* position */) { // length == 1 CHECK_OBJECT_COERCIBLE(this, "String.prototype.startsWith"); @@ -82,7 +59,7 @@ function StringStartsWith(searchString /* position */) { // length == 1 } -// ES6 draft 01-20-14, section 21.1.3.7 +// ES6 draft 04-05-14, section 21.1.3.7 function StringEndsWith(searchString /* position */) { // length == 1 CHECK_OBJECT_COERCIBLE(this, "String.prototype.endsWith"); @@ -114,11 +91,17 @@ function StringEndsWith(searchString /* position */) { // length == 1 } -// ES6 draft 01-20-14, section 21.1.3.6 +// ES6 draft 04-05-14, section 21.1.3.6 function StringContains(searchString /* position */) { // length == 1 CHECK_OBJECT_COERCIBLE(this, "String.prototype.contains"); var s = TO_STRING_INLINE(this); + + if (IS_REGEXP(searchString)) { + throw MakeTypeError("first_argument_not_regexp", + ["String.prototype.contains"]); + } + var ss = TO_STRING_INLINE(searchString); var pos = 0; if (%_ArgumentsLength() > 1) { diff --git a/deps/v8/src/hashmap.h b/deps/v8/src/hashmap.h index 11f6ace7d..4a363b7ce 100644 --- a/deps/v8/src/hashmap.h +++ b/deps/v8/src/hashmap.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
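The hunk above only renames the runtime call behind Math.fround, but the operation itself is simple: round a double to the nearest single-precision value and widen it back. A hedged sketch, assuming ordinary IEEE-754 hardware where a cast through float performs exactly that rounding:

#include <cassert>
#include <cmath>

// Math.fround semantics: round a double to the nearest float32 value and
// widen it back to double. On IEEE-754 hardware a cast through float
// performs exactly this rounding.
static double Fround(double x) {
  return static_cast<double>(static_cast<float>(x));
}

int main() {
  assert(Fround(1.5) == 1.5);        // 1.5 is exactly representable in float32
  assert(Fround(0.1) != 0.1);        // 0.1 is not, so precision is lost
  assert(std::isnan(Fround(NAN)));   // NaN stays NaN
  return 0;
}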
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HASHMAP_H_ #define V8_HASHMAP_H_ @@ -98,6 +75,11 @@ class TemplateHashMapImpl { Entry* Start() const; Entry* Next(Entry* p) const; + // Some match functions defined for convenience. + static bool PointersMatch(void* key1, void* key2) { + return key1 == key2; + } + private: MatchFun match_; Entry* map_; diff --git a/deps/v8/src/heap-inl.h b/deps/v8/src/heap-inl.h index 063cf30ff..3cddcb91d 100644 --- a/deps/v8/src/heap-inl.h +++ b/deps/v8/src/heap-inl.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HEAP_INL_H_ #define V8_HEAP_INL_H_ @@ -36,7 +13,6 @@ #include "list-inl.h" #include "objects.h" #include "platform.h" -#include "v8-counters.h" #include "store-buffer.h" #include "store-buffer-inl.h" @@ -85,22 +61,6 @@ void PromotionQueue::ActivateGuardIfOnTheSamePage() { } -MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str, - PretenureFlag pretenure) { - // Check for ASCII first since this is the common case. - const char* start = str.start(); - int length = str.length(); - int non_ascii_start = String::NonAsciiStart(start, length); - if (non_ascii_start >= length) { - // If the string is ASCII, we do not need to convert the characters - // since UTF8 is backwards compatible with ASCII. - return AllocateStringFromOneByte(str, pretenure); - } - // Non-ASCII and we need to decode. - return AllocateStringFromUtf8Slow(str, non_ascii_start, pretenure); -} - - template<> bool inline Heap::IsOneByte(Vector<const char> str, int chars) { // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported? @@ -115,7 +75,7 @@ bool inline Heap::IsOneByte(String* str, int chars) { } -MaybeObject* Heap::AllocateInternalizedStringFromUtf8( +AllocationResult Heap::AllocateInternalizedStringFromUtf8( Vector<const char> str, int chars, uint32_t hash_field) { if (IsOneByte(str, chars)) { return AllocateOneByteInternalizedString( @@ -126,7 +86,7 @@ MaybeObject* Heap::AllocateInternalizedStringFromUtf8( template<typename T> -MaybeObject* Heap::AllocateInternalizedStringImpl( +AllocationResult Heap::AllocateInternalizedStringImpl( T t, int chars, uint32_t hash_field) { if (IsOneByte(t, chars)) { return AllocateInternalizedStringImpl<true>(t, chars, hash_field); @@ -135,8 +95,9 @@ MaybeObject* Heap::AllocateInternalizedStringImpl( } -MaybeObject* Heap::AllocateOneByteInternalizedString(Vector<const uint8_t> str, - uint32_t hash_field) { +AllocationResult Heap::AllocateOneByteInternalizedString( + Vector<const uint8_t> str, + uint32_t hash_field) { if (str.length() > String::kMaxLength) { return isolate()->ThrowInvalidStringLength(); } @@ -146,13 +107,13 @@ MaybeObject* Heap::AllocateOneByteInternalizedString(Vector<const uint8_t> str, AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); // Allocate string. - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } // String maps are all immortal immovable objects. - reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map); + result->set_map_no_write_barrier(map); // Set length and hash fields of the allocated string. 
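A recurring change in this file is the switch from MaybeObject* to AllocationResult: allocation now returns a value that either carries the new object or a retry request tagged with the space that should be collected, and callers unwrap it with "if (!allocation.To(&result)) return allocation;". The following is a minimal sketch of that idiom, using hypothetical FakeObject/Space stand-ins rather than the real V8 types:

#include <cassert>

// Hypothetical stand-ins for the real V8 types.
struct FakeObject { int payload; };
enum Space { NEW_SPACE, OLD_DATA_SPACE };

// Either a successful allocation or a "retry after GC in this space"
// request, mirroring the AllocationResult idiom in the hunks above.
class Result {
 public:
  static Result Of(FakeObject* obj) { return Result(obj, NEW_SPACE, false); }
  static Result Retry(Space space) { return Result(nullptr, space, true); }

  bool IsRetry() const { return is_retry_; }
  Space RetrySpace() const { return space_; }

  // Writes the object into *out and reports success, so callers can write
  //   if (!allocation.To(&obj)) return allocation;
  bool To(FakeObject** out) const {
    if (is_retry_) return false;
    *out = object_;
    return true;
  }

 private:
  Result(FakeObject* obj, Space space, bool retry)
      : object_(obj), space_(space), is_retry_(retry) {}
  FakeObject* object_;
  Space space_;
  bool is_retry_;
};

int main() {
  FakeObject heap_slot{42};
  Result ok = Result::Of(&heap_slot);
  Result full = Result::Retry(OLD_DATA_SPACE);

  FakeObject* obj = nullptr;
  assert(ok.To(&obj) && obj->payload == 42);
  assert(!full.To(&obj) && full.IsRetry() && full.RetrySpace() == OLD_DATA_SPACE);
  return 0;
}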
String* answer = String::cast(result); answer->set_length(str.length()); @@ -168,8 +129,8 @@ MaybeObject* Heap::AllocateOneByteInternalizedString(Vector<const uint8_t> str, } -MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str, - uint32_t hash_field) { +AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str, + uint32_t hash_field) { if (str.length() > String::kMaxLength) { return isolate()->ThrowInvalidStringLength(); } @@ -179,12 +140,12 @@ MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str, AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); // Allocate string. - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } - reinterpret_cast<HeapObject*>(result)->set_map(map); + result->set_map(map); // Set length and hash fields of the allocated string. String* answer = String::cast(result); answer->set_length(str.length()); @@ -199,24 +160,27 @@ MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str, return answer; } -MaybeObject* Heap::CopyFixedArray(FixedArray* src) { +AllocationResult Heap::CopyFixedArray(FixedArray* src) { + if (src->length() == 0) return src; return CopyFixedArrayWithMap(src, src->map()); } -MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { +AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { + if (src->length() == 0) return src; return CopyFixedDoubleArrayWithMap(src, src->map()); } -MaybeObject* Heap::CopyConstantPoolArray(ConstantPoolArray* src) { +AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) { + if (src->length() == 0) return src; return CopyConstantPoolArrayWithMap(src, src->map()); } -MaybeObject* Heap::AllocateRaw(int size_in_bytes, - AllocationSpace space, - AllocationSpace retry_space) { +AllocationResult Heap::AllocateRaw(int size_in_bytes, + AllocationSpace space, + AllocationSpace retry_space) { ASSERT(AllowHandleAllocation::IsAllowed()); ASSERT(AllowHeapAllocation::IsAllowed()); ASSERT(gc_state_ == NOT_IN_GC); @@ -225,66 +189,49 @@ MaybeObject* Heap::AllocateRaw(int size_in_bytes, if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) && Heap::allocation_timeout_-- <= 0) { - return Failure::RetryAfterGC(space); + return AllocationResult::Retry(space); } isolate_->counters()->objs_since_last_full()->Increment(); isolate_->counters()->objs_since_last_young()->Increment(); #endif HeapObject* object; - MaybeObject* result; + AllocationResult allocation; if (NEW_SPACE == space) { - result = new_space_.AllocateRaw(size_in_bytes); - if (always_allocate() && result->IsFailure() && retry_space != NEW_SPACE) { + allocation = new_space_.AllocateRaw(size_in_bytes); + if (always_allocate() && + allocation.IsRetry() && + retry_space != NEW_SPACE) { space = retry_space; } else { - if (profiler->is_tracking_allocations() && result->To(&object)) { + if (profiler->is_tracking_allocations() && allocation.To(&object)) { profiler->AllocationEvent(object->address(), size_in_bytes); } - return result; + return allocation; } } if (OLD_POINTER_SPACE == space) { - result = old_pointer_space_->AllocateRaw(size_in_bytes); + allocation = old_pointer_space_->AllocateRaw(size_in_bytes); } else if (OLD_DATA_SPACE == space) { - result = 
old_data_space_->AllocateRaw(size_in_bytes); + allocation = old_data_space_->AllocateRaw(size_in_bytes); } else if (CODE_SPACE == space) { - result = code_space_->AllocateRaw(size_in_bytes); + allocation = code_space_->AllocateRaw(size_in_bytes); } else if (LO_SPACE == space) { - result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); + allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); } else if (CELL_SPACE == space) { - result = cell_space_->AllocateRaw(size_in_bytes); + allocation = cell_space_->AllocateRaw(size_in_bytes); } else if (PROPERTY_CELL_SPACE == space) { - result = property_cell_space_->AllocateRaw(size_in_bytes); + allocation = property_cell_space_->AllocateRaw(size_in_bytes); } else { ASSERT(MAP_SPACE == space); - result = map_space_->AllocateRaw(size_in_bytes); + allocation = map_space_->AllocateRaw(size_in_bytes); } - if (result->IsFailure()) old_gen_exhausted_ = true; - if (profiler->is_tracking_allocations() && result->To(&object)) { + if (allocation.IsRetry()) old_gen_exhausted_ = true; + if (profiler->is_tracking_allocations() && allocation.To(&object)) { profiler->AllocationEvent(object->address(), size_in_bytes); } - return result; -} - - -MaybeObject* Heap::NumberFromInt32( - int32_t value, PretenureFlag pretenure) { - if (Smi::IsValid(value)) return Smi::FromInt(value); - // Bypass NumberFromDouble to avoid various redundant checks. - return AllocateHeapNumber(FastI2D(value), pretenure); -} - - -MaybeObject* Heap::NumberFromUint32( - uint32_t value, PretenureFlag pretenure) { - if (static_cast<int32_t>(value) >= 0 && - Smi::IsValid(static_cast<int32_t>(value))) { - return Smi::FromInt(static_cast<int32_t>(value)); - } - // Bypass NumberFromDouble to avoid various redundant checks. - return AllocateHeapNumber(FastUI2D(value), pretenure); + return allocation; } @@ -453,6 +400,8 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) { case PROPERTY_CELL_SPACE: case LO_SPACE: return false; + case INVALID_SPACE: + break; } UNREACHABLE(); return false; @@ -490,33 +439,51 @@ void Heap::ScavengePointer(HeapObject** p) { } -void Heap::UpdateAllocationSiteFeedback(HeapObject* object, - ScratchpadSlotMode mode) { - Heap* heap = object->GetHeap(); - ASSERT(heap->InFromSpace(object)); - - if (!FLAG_allocation_site_pretenuring || - !AllocationSite::CanTrack(object->map()->instance_type())) return; - +AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) { // Check if there is potentially a memento behind the object. If // the last word of the momento is on another page we return - // immediatelly. Note that we do not have to compare with the current - // top pointer of the from space page, since we always install filler - // objects above the top pointer of a from space page when performing - // a garbage collection. + // immediately. Address object_address = object->address(); Address memento_address = object_address + object->Size(); Address last_memento_word_address = memento_address + kPointerSize; if (!NewSpacePage::OnSamePage(object_address, last_memento_word_address)) { - return; + return NULL; } HeapObject* candidate = HeapObject::FromAddress(memento_address); - if (candidate->map() != heap->allocation_memento_map()) return; + if (candidate->map() != allocation_memento_map()) return NULL; + + // Either the object is the last object in the new space, or there is another + // object of at least word size (the header map word) following it, so + // suffices to compare ptr and top here. 
Note that technically we do not have + // to compare with the current top pointer of the from space page during GC, + // since we always install filler objects above the top pointer of a from + // space page when performing a garbage collection. However, always performing + // the test makes it possible to have a single, unified version of + // FindAllocationMemento that is used both by the GC and the mutator. + Address top = NewSpaceTop(); + ASSERT(memento_address == top || + memento_address + HeapObject::kHeaderSize <= top || + !NewSpacePage::OnSamePage(memento_address, top)); + if (memento_address == top) return NULL; AllocationMemento* memento = AllocationMemento::cast(candidate); - if (!memento->IsValid()) return; + if (!memento->IsValid()) return NULL; + return memento; +} + + +void Heap::UpdateAllocationSiteFeedback(HeapObject* object, + ScratchpadSlotMode mode) { + Heap* heap = object->GetHeap(); + ASSERT(heap->InFromSpace(object)); + + if (!FLAG_allocation_site_pretenuring || + !AllocationSite::CanTrack(object->map()->instance_type())) return; + + AllocationMemento* memento = heap->FindAllocationMemento(object); + if (memento == NULL) return; if (memento->GetAllocationSite()->IncrementMementoFoundCount()) { heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode); @@ -560,25 +527,6 @@ bool Heap::CollectGarbage(AllocationSpace space, } -MaybeObject* Heap::PrepareForCompare(String* str) { - // Always flatten small strings and force flattening of long strings - // after we have accumulated a certain amount we failed to flatten. - static const int kMaxAlwaysFlattenLength = 32; - static const int kFlattenLongThreshold = 16*KB; - - const int length = str->length(); - MaybeObject* obj = str->TryFlatten(); - if (length <= kMaxAlwaysFlattenLength || - unflattened_strings_length_ >= kFlattenLongThreshold) { - return obj; - } - if (obj->IsFailure()) { - unflattened_strings_length_ += length; - } - return str; -} - - int64_t Heap::AdjustAmountOfExternalAllocatedMemory( int64_t change_in_bytes) { ASSERT(HasBeenSetUp()); @@ -627,13 +575,6 @@ Isolate* Heap::isolate() { } -#ifdef DEBUG -#define GC_GREEDY_CHECK(ISOLATE) \ - if (FLAG_gc_greedy) (ISOLATE)->heap()->GarbageCollectionGreedyCheck() -#else -#define GC_GREEDY_CHECK(ISOLATE) { } -#endif - // Calls the FUNCTION_CALL function and retries it up to three times // to guarantee that any allocations performed during the call will // succeed if there's enough memory. @@ -641,30 +582,31 @@ Isolate* Heap::isolate() { // Warning: Do not use the identifiers __object__, __maybe_object__ or // __scope__ in a call to this macro. 
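FindAllocationMemento above refuses to look behind the object unless the last word of the candidate memento is on the same new-space page, and it also bails out when the candidate sits at the current allocation top. A simplified, self-contained sketch of that address arithmetic, assuming a power-of-two page size and plain integers instead of real heap pages (the constants and helper names are illustrative only):

#include <cassert>
#include <cstdint>

// Assumed power-of-two page size for the sketch; the real value lives in
// the V8 spaces code.
static const uintptr_t kPageSize = 1 << 20;
static const uintptr_t kPointerSize = sizeof(void*);

// Two addresses are on the same page iff they agree in all bits above the
// page offset.
static bool OnSamePage(uintptr_t a, uintptr_t b) {
  return (a & ~(kPageSize - 1)) == (b & ~(kPageSize - 1));
}

// Decide whether it is even safe to look for a memento directly behind an
// object of the given size, mirroring the early exits above: the last
// memento word must be on the same page, and the memento must start
// strictly below the current allocation top.
static bool MayHaveMemento(uintptr_t object_address, size_t object_size,
                           uintptr_t new_space_top) {
  uintptr_t memento_address = object_address + object_size;
  uintptr_t last_memento_word = memento_address + kPointerSize;
  if (!OnSamePage(object_address, last_memento_word)) return false;
  if (memento_address == new_space_top) return false;
  return true;
}

int main() {
  uintptr_t page = 16 * kPageSize;  // some page-aligned base address
  assert(MayHaveMemento(page + 64, 32, page + 4096));
  assert(!MayHaveMemento(page + 64, 32, page + 96));            // memento at top
  assert(!MayHaveMemento(page + kPageSize - 16, 8, page + 2 * kPageSize));
  return 0;
}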
+#define RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ + if (!__allocation__.IsRetry()) { \ + __object__ = __allocation__.ToObjectChecked(); \ + if (__object__ == (ISOLATE)->heap()->exception()) { RETURN_EMPTY; } \ + RETURN_VALUE; \ + } + #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ do { \ - GC_GREEDY_CHECK(ISOLATE); \ - MaybeObject* __maybe_object__ = FUNCTION_CALL; \ + AllocationResult __allocation__ = FUNCTION_CALL; \ Object* __object__ = NULL; \ - if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ - if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ - (ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \ - allocation_space(), \ - "allocation failure"); \ - __maybe_object__ = FUNCTION_CALL; \ - if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ - if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \ + RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ + (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(), \ + "allocation failure"); \ + __allocation__ = FUNCTION_CALL; \ + RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ { \ AlwaysAllocateScope __scope__(ISOLATE); \ - __maybe_object__ = FUNCTION_CALL; \ + __allocation__ = FUNCTION_CALL; \ } \ - if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ - if (__maybe_object__->IsRetryAfterGC()) { \ + RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ /* TODO(1181417): Fix this. */ \ - v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);\ - } \ + v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \ RETURN_EMPTY; \ } while (false) @@ -687,13 +629,6 @@ Isolate* Heap::isolate() { CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return) -#define CALL_HEAP_FUNCTION_PASS_EXCEPTION(ISOLATE, FUNCTION_CALL) \ - CALL_AND_RETRY(ISOLATE, \ - FUNCTION_CALL, \ - return __object__, \ - return __maybe_object__) - - void ExternalStringTable::AddString(String* string) { ASSERT(string->IsExternalString()); if (heap_->InNewSpace(string)) { diff --git a/deps/v8/src/heap-profiler.cc b/deps/v8/src/heap-profiler.cc index 1dc111321..6068bf43b 100644 --- a/deps/v8/src/heap-profiler.cc +++ b/deps/v8/src/heap-profiler.cc @@ -1,29 +1,6 @@ // Copyright 2009-2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
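The rewritten CALL_AND_RETRY macro above attempts the allocation, collects garbage in the space named by the retry result, attempts again, and only then falls back to collecting all available garbage before giving up. A macro-free sketch of the same control flow, using a callable and hypothetical GC hooks instead of the real heap interface:

#include <cstdio>
#include <optional>

// Hypothetical allocation outcome for the sketch: either a value or
// "retry after collecting this space".
struct Allocation {
  std::optional<int> value;
  int retry_space = -1;
  bool IsRetry() const { return !value.has_value(); }
};

// Mirrors the macro's shape: allocate, GC the failing space, allocate,
// full GC, allocate once more, then give up.
template <typename AllocFn, typename GcFn, typename FullGcFn>
std::optional<int> AllocateWithRetry(AllocFn allocate, GcFn collect_space,
                                     FullGcFn collect_all) {
  Allocation result = allocate();
  if (!result.IsRetry()) return result.value;

  collect_space(result.retry_space);  // targeted GC, then retry
  result = allocate();
  if (!result.IsRetry()) return result.value;

  collect_all();                      // last resort: collect everything
  result = allocate();
  if (!result.IsRetry()) return result.value;

  return std::nullopt;                // caller decides how to fail
}

int main() {
  int attempts = 0;
  auto allocate = [&]() -> Allocation {
    ++attempts;
    if (attempts < 3) return Allocation{std::nullopt, 1};  // pretend space 1 is full
    return Allocation{42, -1};
  };
  auto gc = [](int space) { std::printf("collect space %d\n", space); };
  auto full_gc = [] { std::printf("collect all\n"); };

  std::optional<int> value = AllocateWithRetry(allocate, gc, full_gc);
  return value.value_or(-1) == 42 ? 0 : 1;
}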
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/heap-profiler.h b/deps/v8/src/heap-profiler.h index e4838df13..6fdfd955e 100644 --- a/deps/v8/src/heap-profiler.h +++ b/deps/v8/src/heap-profiler.h @@ -1,29 +1,6 @@ // Copyright 2009-2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HEAP_PROFILER_H_ #define V8_HEAP_PROFILER_H_ diff --git a/deps/v8/src/heap-snapshot-generator-inl.h b/deps/v8/src/heap-snapshot-generator-inl.h index 582de32c2..90f3b1bc1 100644 --- a/deps/v8/src/heap-snapshot-generator-inl.h +++ b/deps/v8/src/heap-snapshot-generator-inl.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HEAP_SNAPSHOT_GENERATOR_INL_H_ #define V8_HEAP_SNAPSHOT_GENERATOR_INL_H_ diff --git a/deps/v8/src/heap-snapshot-generator.cc b/deps/v8/src/heap-snapshot-generator.cc index 332d0dbf6..cafee77b4 100644 --- a/deps/v8/src/heap-snapshot-generator.cc +++ b/deps/v8/src/heap-snapshot-generator.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -31,10 +8,10 @@ #include "allocation-tracker.h" #include "code-stubs.h" -#include "heap-profiler.h" +#include "conversions.h" #include "debug.h" +#include "heap-profiler.h" #include "types.h" -#include "v8conversions.h" namespace v8 { namespace internal { @@ -732,7 +709,7 @@ size_t HeapObjectsMap::GetUsedMemorySize() const { HeapEntriesMap::HeapEntriesMap() - : entries_(HeapThingsMatch) { + : entries_(HashMap::PointersMatch) { } @@ -751,7 +728,7 @@ void HeapEntriesMap::Pair(HeapThing thing, int entry) { HeapObjectsSet::HeapObjectsSet() - : entries_(HeapEntriesMap::HeapThingsMatch) { + : entries_(HashMap::PointersMatch) { } @@ -1079,21 +1056,30 @@ class IndexedReferencesExtractor : public ObjectVisitor { static void MarkVisitedField(HeapObject* obj, int offset) { if (offset < 0) return; Address field = obj->address() + offset; - ASSERT(!Memory::Object_at(field)->IsFailure()); ASSERT(Memory::Object_at(field)->IsHeapObject()); - *field |= kFailureTag; + intptr_t p = reinterpret_cast<intptr_t>(Memory::Object_at(field)); + ASSERT(!IsMarked(p)); + intptr_t p_tagged = p | kTag; + Memory::Object_at(field) = reinterpret_cast<Object*>(p_tagged); } private: bool CheckVisitedAndUnmark(Object** field) { - if ((*field)->IsFailure()) { - intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask; - *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag); + intptr_t p = reinterpret_cast<intptr_t>(*field); + if (IsMarked(p)) { + intptr_t p_untagged = (p & ~kTaggingMask) | kHeapObjectTag; + *field = reinterpret_cast<Object*>(p_untagged); ASSERT((*field)->IsHeapObject()); return true; } return false; } + + static const intptr_t kTaggingMask = 3; + static const intptr_t kTag = 3; + + static bool IsMarked(intptr_t p) { return (p & kTaggingMask) == kTag; } + V8HeapExplorer* generator_; HeapObject* parent_obj_; int parent_; @@ -1101,10 +1087,8 @@ class IndexedReferencesExtractor : public ObjectVisitor { }; -void V8HeapExplorer::ExtractReferences(HeapObject* obj) { - HeapEntry* heap_entry = GetEntry(obj); - if (heap_entry == NULL) return; // No interest in this object. - int entry = heap_entry->index(); +bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) { + if (obj->IsFixedArray()) return false; // FixedArrays are processed on pass 2 if (obj->IsJSGlobalProxy()) { ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj)); @@ -1114,8 +1098,6 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) { ExtractJSObjectReferences(entry, JSObject::cast(obj)); } else if (obj->IsString()) { ExtractStringReferences(entry, String::cast(obj)); - } else if (obj->IsContext()) { - ExtractContextReferences(entry, Context::cast(obj)); } else if (obj->IsMap()) { ExtractMapReferences(entry, Map::cast(obj)); } else if (obj->IsSharedFunctionInfo()) { @@ -1137,12 +1119,19 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) { } else if (obj->IsAllocationSite()) { ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj)); } - SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset); + return true; +} + - // Extract unvisited fields as hidden references and restore tags - // of visited fields. 
- IndexedReferencesExtractor refs_extractor(this, obj, entry); - obj->Iterate(&refs_extractor); +bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) { + if (!obj->IsFixedArray()) return false; + + if (obj->IsContext()) { + ExtractContextReferences(entry, Context::cast(obj)); + } else { + ExtractFixedArrayReferences(entry, FixedArray::cast(obj)); + } + return true; } @@ -1319,6 +1308,22 @@ void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) { TagObject(back_pointer, "(back pointer)"); SetInternalReference(transitions, transitions_entry, "back_pointer", back_pointer); + + if (FLAG_collect_maps && map->CanTransition()) { + if (!transitions->IsSimpleTransition()) { + if (transitions->HasPrototypeTransitions()) { + FixedArray* prototype_transitions = + transitions->GetPrototypeTransitions(); + MarkAsWeakContainer(prototype_transitions); + TagObject(prototype_transitions, "(prototype transitions"); + SetInternalReference(transitions, transitions_entry, + "prototype_transitions", prototype_transitions); + } + // TODO(alph): transitions keys are strong links. + MarkAsWeakContainer(transitions); + } + } + TagObject(transitions, "(transition array)"); SetInternalReference(map, entry, "transitions", transitions, @@ -1336,6 +1341,7 @@ void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) { "descriptors", descriptors, Map::kDescriptorsOffset); + MarkAsWeakContainer(map->code_cache()); SetInternalReference(map, entry, "code_cache", map->code_cache(), Map::kCodeCacheOffset); @@ -1345,6 +1351,7 @@ void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) { "constructor", map->constructor(), Map::kConstructorOffset); TagObject(map->dependent_code(), "(dependent code)"); + MarkAsWeakContainer(map->dependent_code()); SetInternalReference(map, entry, "dependent_code", map->dependent_code(), Map::kDependentCodeOffset); @@ -1399,6 +1406,9 @@ void V8HeapExplorer::ExtractSharedFunctionInfoReferences( SetInternalReference(obj, entry, "optimized_code_map", shared->optimized_code_map(), SharedFunctionInfo::kOptimizedCodeMapOffset); + SetInternalReference(obj, entry, + "feedback_vector", shared->feedback_vector(), + SharedFunctionInfo::kFeedbackVectorOffset); SetWeakReference(obj, entry, "initial_map", shared->initial_map(), SharedFunctionInfo::kInitialMapOffset); @@ -1506,6 +1516,7 @@ void V8HeapExplorer::ExtractPropertyCellReferences(int entry, ExtractCellReferences(entry, cell); SetInternalReference(cell, entry, "type", cell->type(), PropertyCell::kTypeOffset); + MarkAsWeakContainer(cell->dependent_code()); SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(), PropertyCell::kDependentCodeOffset); } @@ -1517,6 +1528,7 @@ void V8HeapExplorer::ExtractAllocationSiteReferences(int entry, AllocationSite::kTransitionInfoOffset); SetInternalReference(site, entry, "nested_site", site->nested_site(), AllocationSite::kNestedSiteOffset); + MarkAsWeakContainer(site->dependent_code()); SetInternalReference(site, entry, "dependent_code", site->dependent_code(), AllocationSite::kDependentCodeOffset); // Do not visit weak_next as it is not visited by the StaticVisitor, @@ -1562,6 +1574,20 @@ void V8HeapExplorer::ExtractJSArrayBufferReferences( } +void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) { + bool is_weak = weak_containers_.Contains(array); + for (int i = 0, l = array->length(); i < l; ++i) { + if (is_weak) { + SetWeakReference(array, entry, + i, array->get(i), array->OffsetOfElementAt(i)); + } else { + 
SetInternalReference(array, entry, + i, array->get(i), array->OffsetOfElementAt(i)); + } + } +} + + void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) { if (!js_obj->IsJSFunction()) return; @@ -1634,7 +1660,6 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) { case HANDLER: // only in lookup results, not in descriptors case INTERCEPTOR: // only in lookup results, not in descriptors break; - case TRANSITION: case NONEXISTENT: UNREACHABLE(); break; @@ -1725,8 +1750,10 @@ String* V8HeapExplorer::GetConstructorName(JSObject* object) { // return its name. This is for instances of binding objects, which // have prototype constructor type "Object". Object* constructor_prop = NULL; - LookupResult result(heap->isolate()); - object->LocalLookupRealNamedProperty(heap->constructor_string(), &result); + Isolate* isolate = heap->isolate(); + LookupResult result(isolate); + object->LocalLookupRealNamedProperty( + isolate->factory()->constructor_string(), &result); if (!result.IsFound()) return object->constructor_name(); constructor_prop = result.GetLazyValue(); @@ -1833,6 +1860,25 @@ bool V8HeapExplorer::IterateAndExtractReferences( heap_->IterateRoots(&extractor, VISIT_ALL); extractor.FillReferences(this); + // We have to do two passes as sometimes FixedArrays are used + // to weakly hold their items, and it's impossible to distinguish + // between these cases without processing the array owner first. + bool interrupted = + IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() || + IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>(); + + if (interrupted) { + filler_ = NULL; + return false; + } + + filler_ = NULL; + return progress_->ProgressReport(true); +} + + +template<V8HeapExplorer::ExtractReferencesMethod extractor> +bool V8HeapExplorer::IterateAndExtractSinglePass() { // Now iterate the whole heap. bool interrupted = false; HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable); @@ -1840,18 +1886,22 @@ bool V8HeapExplorer::IterateAndExtractReferences( for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next(), progress_->ProgressStep()) { - if (!interrupted) { - ExtractReferences(obj); - if (!progress_->ProgressReport(false)) interrupted = true; + if (interrupted) continue; + + HeapEntry* heap_entry = GetEntry(obj); + int entry = heap_entry->index(); + if ((this->*extractor)(entry, obj)) { + SetInternalReference(obj, entry, + "map", obj->map(), HeapObject::kMapOffset); + // Extract unvisited fields as hidden references and restore tags + // of visited fields. 
+ IndexedReferencesExtractor refs_extractor(this, obj, entry); + obj->Iterate(&refs_extractor); } - } - if (interrupted) { - filler_ = NULL; - return false; - } - filler_ = NULL; - return progress_->ProgressReport(true); + if (!progress_->ProgressReport(false)) interrupted = true; + } + return interrupted; } @@ -1987,6 +2037,24 @@ void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj, } +void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj, + int parent_entry, + int index, + Object* child_obj, + int field_offset) { + ASSERT(parent_entry == GetEntry(parent_obj)->index()); + HeapEntry* child_entry = GetEntry(child_obj); + if (child_entry == NULL) return; + if (IsEssentialObject(child_obj)) { + filler_->SetNamedReference(HeapGraphEdge::kWeak, + parent_entry, + names_->GetFormatted("%d", index), + child_entry); + } + IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); +} + + void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj, int parent_entry, Name* reference_name, @@ -2072,9 +2140,7 @@ void V8HeapExplorer::SetGcSubrootReference( GlobalObject* global = context->global_object(); if (global->IsJSGlobalObject()) { bool is_debug_object = false; -#ifdef ENABLE_DEBUGGER_SUPPORT is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global); -#endif if (!is_debug_object && !user_roots_.Contains(global)) { user_roots_.Insert(global); SetUserGlobalReference(global); @@ -2114,6 +2180,13 @@ void V8HeapExplorer::TagObject(Object* obj, const char* tag) { } +void V8HeapExplorer::MarkAsWeakContainer(Object* object) { + if (IsEssentialObject(object) && object->IsFixedArray()) { + weak_containers_.Insert(object); + } +} + + class GlobalObjectsEnumerator : public ObjectVisitor { public: virtual void VisitPointers(Object** start, Object** end) { @@ -2504,7 +2577,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() { debug_heap->Verify(); #endif - SetProgressTotal(1); // 1 pass. + SetProgressTotal(2); // 2 passes. #ifdef VERIFY_HEAP debug_heap->Verify(); diff --git a/deps/v8/src/heap-snapshot-generator.h b/deps/v8/src/heap-snapshot-generator.h index 634ede19a..a0f2a6293 100644 --- a/deps/v8/src/heap-snapshot-generator.h +++ b/deps/v8/src/heap-snapshot-generator.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
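IterateAndExtractSinglePass above is a method template whose non-type parameter is a pointer to the member function that processes one object, so the same heap-iteration loop can be instantiated once for pass 1 and once for pass 2 without virtual dispatch. A toy sketch of that technique, with a vector standing in for the heap iterator:

#include <cassert>
#include <vector>

class Explorer {
 public:
  // The member-function-pointer type used as a template parameter,
  // mirroring V8HeapExplorer::ExtractReferencesMethod above.
  typedef bool (Explorer::*ExtractMethod)(int entry, int object);

  bool Pass1(int entry, int object) {
    if (object % 2 == 0) return false;  // even objects are left for pass 2
    odd_seen_.push_back(entry);
    return true;
  }

  bool Pass2(int entry, int object) {
    if (object % 2 != 0) return false;
    even_seen_.push_back(entry);
    return true;
  }

  // One loop body, instantiated once per pass at compile time.
  template <ExtractMethod extractor>
  void RunSinglePass(const std::vector<int>& heap) {
    for (int entry = 0; entry < static_cast<int>(heap.size()); ++entry) {
      (this->*extractor)(entry, heap[entry]);
    }
  }

  std::vector<int> odd_seen_;
  std::vector<int> even_seen_;
};

int main() {
  Explorer explorer;
  std::vector<int> heap = {3, 4, 7, 8};
  explorer.RunSinglePass<&Explorer::Pass1>(heap);
  explorer.RunSinglePass<&Explorer::Pass2>(heap);
  assert(explorer.odd_seen_.size() == 2 && explorer.even_seen_.size() == 2);
  return 0;
}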
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HEAP_SNAPSHOT_GENERATOR_H_ #define V8_HEAP_SNAPSHOT_GENERATOR_H_ @@ -315,9 +292,6 @@ class HeapEntriesMap { static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)), v8::internal::kZeroHashSeed); } - static bool HeapThingsMatch(HeapThing key1, HeapThing key2) { - return key1 == key2; - } HashMap entries_; @@ -376,6 +350,9 @@ class V8HeapExplorer : public HeapEntriesAllocator { static HeapObject* const kInternalRootObject; private: + typedef bool (V8HeapExplorer::*ExtractReferencesMethod)(int entry, + HeapObject* object); + HeapEntry* AddEntry(HeapObject* object); HeapEntry* AddEntry(HeapObject* object, HeapEntry::Type type, @@ -383,7 +360,11 @@ class V8HeapExplorer : public HeapEntriesAllocator { const char* GetSystemEntryName(HeapObject* object); - void ExtractReferences(HeapObject* obj); + template<V8HeapExplorer::ExtractReferencesMethod extractor> + bool IterateAndExtractSinglePass(); + + bool ExtractReferencesPass1(int entry, HeapObject* obj); + bool ExtractReferencesPass2(int entry, HeapObject* obj); void ExtractJSGlobalProxyReferences(int entry, JSGlobalProxy* proxy); void ExtractJSObjectReferences(int entry, JSObject* js_obj); void ExtractStringReferences(int entry, String* obj); @@ -400,12 +381,14 @@ class V8HeapExplorer : public HeapEntriesAllocator { void ExtractPropertyCellReferences(int entry, PropertyCell* cell); void ExtractAllocationSiteReferences(int entry, AllocationSite* site); void ExtractJSArrayBufferReferences(int entry, JSArrayBuffer* buffer); + void ExtractFixedArrayReferences(int entry, FixedArray* array); void ExtractClosureReferences(JSObject* js_obj, int entry); void ExtractPropertyReferences(JSObject* js_obj, int entry); bool ExtractAccessorPairProperty(JSObject* js_obj, int entry, Object* key, Object* callback_obj); void ExtractElementReferences(JSObject* js_obj, int entry); void ExtractInternalReferences(JSObject* js_obj, int entry); + bool IsEssentialObject(Object* object); void SetContextReference(HeapObject* parent_obj, int parent, @@ -439,6 +422,11 @@ class V8HeapExplorer : public HeapEntriesAllocator { const char* reference_name, Object* child_obj, int field_offset); + void SetWeakReference(HeapObject* parent_obj, + int parent, + int index, + Object* child_obj, + int field_offset); void SetPropertyReference(HeapObject* parent_obj, int parent, Name* reference_name, @@ -452,6 +440,7 @@ class V8HeapExplorer : public HeapEntriesAllocator { VisitorSynchronization::SyncTag tag, bool is_weak, Object* child); const char* GetStrongGcSubrootName(Object* object); void TagObject(Object* obj, const char* tag); + void MarkAsWeakContainer(Object* object); HeapEntry* GetEntry(Object* obj); @@ -467,6 +456,7 @@ class V8HeapExplorer : public HeapEntriesAllocator { HeapObjectsSet objects_tags_; HeapObjectsSet strong_gc_subroot_names_; HeapObjectsSet user_roots_; + HeapObjectsSet weak_containers_; 
v8::HeapProfiler::ObjectNameResolver* global_object_name_resolver_; static HeapObject* const kGcRootsObject; diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc index 6374433bb..13771e613 100644 --- a/deps/v8/src/heap.cc +++ b/deps/v8/src/heap.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -32,6 +9,7 @@ #include "bootstrapper.h" #include "codegen.h" #include "compilation-cache.h" +#include "conversions.h" #include "cpu-profiler.h" #include "debug.h" #include "deoptimizer.h" @@ -49,9 +27,8 @@ #include "snapshot.h" #include "store-buffer.h" #include "utils/random-number-generator.h" -#include "v8conversions.h" +#include "utils.h" #include "v8threads.h" -#include "v8utils.h" #include "vm-state-inl.h" #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP #include "regexp-macro-assembler.h" @@ -68,7 +45,7 @@ namespace internal { Heap::Heap() : isolate_(NULL), - code_range_size_(kIs64BitArch ? 512 * MB : 0), + code_range_size_(0), // semispace_size_ should be a power of 2 and old_generation_size_ should be // a multiple of Page::kPageSize. reserved_semispace_size_(8 * (kPointerSize / 4) * MB), @@ -81,6 +58,7 @@ Heap::Heap() // Will be 4 * reserved_semispace_size_ to ensure that young // generation can be aligned to its size. maximum_committed_(0), + old_space_growing_factor_(4), survived_since_last_expansion_(0), sweep_generation_(0), always_allocate_scope_depth_(0), @@ -165,15 +143,6 @@ Heap::Heap() // Ensure old_generation_size_ is a multiple of kPageSize. ASSERT(MB >= Page::kPageSize); - intptr_t max_virtual = OS::MaxVirtualMemory(); - - if (max_virtual > 0) { - if (code_range_size_ > 0) { - // Reserve no more than 1/8 of the memory for the code range. 
- code_range_size_ = Min(code_range_size_, max_virtual >> 3); - } - } - memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); native_contexts_list_ = NULL; array_buffers_list_ = Smi::FromInt(0); @@ -719,12 +688,10 @@ void Heap::GarbageCollectionEpilogue() { #undef UPDATE_FRAGMENTATION_FOR_SPACE #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE -#if defined(DEBUG) +#ifdef DEBUG ReportStatisticsAfterGC(); #endif // DEBUG -#ifdef ENABLE_DEBUGGER_SUPPORT isolate_->debug()->AfterGarbageCollection(); -#endif // ENABLE_DEBUGGER_SUPPORT } @@ -947,14 +914,14 @@ void Heap::ReserveSpace(int *sizes, Address *locations_out) { ASSERT(NEW_SPACE == FIRST_PAGED_SPACE - 1); for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) { if (sizes[space] != 0) { - MaybeObject* allocation; + AllocationResult allocation; if (space == NEW_SPACE) { allocation = new_space()->AllocateRaw(sizes[space]); } else { allocation = paged_space(space)->AllocateRaw(sizes[space]); } FreeListNode* node; - if (!allocation->To<FreeListNode>(&node)) { + if (!allocation.To(&node)) { if (space == NEW_SPACE) { Heap::CollectGarbage(NEW_SPACE, "failed to reserve space in the new space"); @@ -1527,9 +1494,6 @@ void Heap::Scavenge() { incremental_marking()->PrepareForScavenge(); - paged_space(OLD_DATA_SPACE)->EnsureSweeperProgress(new_space_.Size()); - paged_space(OLD_POINTER_SPACE)->EnsureSweeperProgress(new_space_.Size()); - // Flip the semispaces. After flipping, to space is empty, from space has // live objects. new_space_.Flip(); @@ -1719,180 +1683,6 @@ void Heap::UpdateReferencesInExternalStringTable( } -template <class T> -struct WeakListVisitor; - - -template <class T> -static Object* VisitWeakList(Heap* heap, - Object* list, - WeakObjectRetainer* retainer, - bool record_slots) { - Object* undefined = heap->undefined_value(); - Object* head = undefined; - T* tail = NULL; - MarkCompactCollector* collector = heap->mark_compact_collector(); - while (list != undefined) { - // Check whether to keep the candidate in the list. - T* candidate = reinterpret_cast<T*>(list); - Object* retained = retainer->RetainAs(list); - if (retained != NULL) { - if (head == undefined) { - // First element in the list. - head = retained; - } else { - // Subsequent elements in the list. - ASSERT(tail != NULL); - WeakListVisitor<T>::SetWeakNext(tail, retained); - if (record_slots) { - Object** next_slot = - HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset()); - collector->RecordSlot(next_slot, next_slot, retained); - } - } - // Retained object is new tail. - ASSERT(!retained->IsUndefined()); - candidate = reinterpret_cast<T*>(retained); - tail = candidate; - - - // tail is a live object, visit it. - WeakListVisitor<T>::VisitLiveObject( - heap, tail, retainer, record_slots); - } else { - WeakListVisitor<T>::VisitPhantomObject(heap, candidate); - } - - // Move to next element in the list. - list = WeakListVisitor<T>::WeakNext(candidate); - } - - // Terminate the list if there is one or more elements. 
- if (tail != NULL) { - WeakListVisitor<T>::SetWeakNext(tail, undefined); - } - return head; -} - - -template <class T> -static void ClearWeakList(Heap* heap, - Object* list) { - Object* undefined = heap->undefined_value(); - while (list != undefined) { - T* candidate = reinterpret_cast<T*>(list); - list = WeakListVisitor<T>::WeakNext(candidate); - WeakListVisitor<T>::SetWeakNext(candidate, undefined); - } -} - - -template<> -struct WeakListVisitor<JSFunction> { - static void SetWeakNext(JSFunction* function, Object* next) { - function->set_next_function_link(next); - } - - static Object* WeakNext(JSFunction* function) { - return function->next_function_link(); - } - - static int WeakNextOffset() { - return JSFunction::kNextFunctionLinkOffset; - } - - static void VisitLiveObject(Heap*, JSFunction*, - WeakObjectRetainer*, bool) { - } - - static void VisitPhantomObject(Heap*, JSFunction*) { - } -}; - - -template<> -struct WeakListVisitor<Code> { - static void SetWeakNext(Code* code, Object* next) { - code->set_next_code_link(next); - } - - static Object* WeakNext(Code* code) { - return code->next_code_link(); - } - - static int WeakNextOffset() { - return Code::kNextCodeLinkOffset; - } - - static void VisitLiveObject(Heap*, Code*, - WeakObjectRetainer*, bool) { - } - - static void VisitPhantomObject(Heap*, Code*) { - } -}; - - -template<> -struct WeakListVisitor<Context> { - static void SetWeakNext(Context* context, Object* next) { - context->set(Context::NEXT_CONTEXT_LINK, - next, - UPDATE_WRITE_BARRIER); - } - - static Object* WeakNext(Context* context) { - return context->get(Context::NEXT_CONTEXT_LINK); - } - - static void VisitLiveObject(Heap* heap, - Context* context, - WeakObjectRetainer* retainer, - bool record_slots) { - // Process the three weak lists linked off the context. - DoWeakList<JSFunction>(heap, context, retainer, record_slots, - Context::OPTIMIZED_FUNCTIONS_LIST); - DoWeakList<Code>(heap, context, retainer, record_slots, - Context::OPTIMIZED_CODE_LIST); - DoWeakList<Code>(heap, context, retainer, record_slots, - Context::DEOPTIMIZED_CODE_LIST); - } - - template<class T> - static void DoWeakList(Heap* heap, - Context* context, - WeakObjectRetainer* retainer, - bool record_slots, - int index) { - // Visit the weak list, removing dead intermediate elements. - Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer, - record_slots); - - // Update the list head. - context->set(index, list_head, UPDATE_WRITE_BARRIER); - - if (record_slots) { - // Record the updated slot if necessary. - Object** head_slot = HeapObject::RawField( - context, FixedArray::SizeFor(index)); - heap->mark_compact_collector()->RecordSlot( - head_slot, head_slot, list_head); - } - } - - static void VisitPhantomObject(Heap* heap, Context* context) { - ClearWeakList<JSFunction>(heap, - context->get(Context::OPTIMIZED_FUNCTIONS_LIST)); - ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST)); - ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST)); - } - - static int WeakNextOffset() { - return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK); - } -}; - - void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { // We don't record weak slots during marking or scavenges. // Instead we do it once when we complete mark-compact cycle. 
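The WeakListVisitor<T>/VisitWeakList<T> machinery removed from heap.cc in these hunks walks an intrusive singly linked list, keeps the elements the retainer still wants, and splices out the rest through a per-type traits struct. A condensed sketch of the same pattern with a toy node type (the real code additionally lets the retainer substitute a forwarded object; this version only keeps or drops):

#include <cassert>
#include <functional>

// Toy node with an intrusive weak-next pointer.
struct Node {
  int id;
  Node* weak_next;
};

// Per-type traits, mirroring the WeakListVisitor<T> specializations above.
struct NodeListTraits {
  static Node* WeakNext(Node* n) { return n->weak_next; }
  static void SetWeakNext(Node* n, Node* next) { n->weak_next = next; }
};

// Walk the list, keep nodes the retainer accepts, splice out the rest,
// and return the new head (possibly null).
template <typename Traits>
Node* VisitWeakList(Node* list, const std::function<bool(Node*)>& retain) {
  Node* head = nullptr;
  Node* tail = nullptr;
  while (list != nullptr) {
    Node* candidate = list;
    list = Traits::WeakNext(candidate);        // advance before relinking
    if (retain(candidate)) {
      if (head == nullptr) {
        head = candidate;                      // first live element
      } else {
        Traits::SetWeakNext(tail, candidate);  // splice after previous live one
      }
      tail = candidate;
    }
  }
  if (tail != nullptr) Traits::SetWeakNext(tail, nullptr);  // terminate the list
  return head;
}

int main() {
  Node c{3, nullptr}, b{2, &c}, a{1, &b};
  Node* head = VisitWeakList<NodeListTraits>(
      &a, [](Node* n) { return n->id != 2; });  // drop the middle element
  assert(head == &a && a.weak_next == &c && c.weak_next == nullptr);
  return 0;
}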
@@ -1918,66 +1708,6 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, } -template<> -struct WeakListVisitor<JSArrayBufferView> { - static void SetWeakNext(JSArrayBufferView* obj, Object* next) { - obj->set_weak_next(next); - } - - static Object* WeakNext(JSArrayBufferView* obj) { - return obj->weak_next(); - } - - static void VisitLiveObject(Heap*, - JSArrayBufferView* obj, - WeakObjectRetainer* retainer, - bool record_slots) {} - - static void VisitPhantomObject(Heap*, JSArrayBufferView*) {} - - static int WeakNextOffset() { - return JSArrayBufferView::kWeakNextOffset; - } -}; - - -template<> -struct WeakListVisitor<JSArrayBuffer> { - static void SetWeakNext(JSArrayBuffer* obj, Object* next) { - obj->set_weak_next(next); - } - - static Object* WeakNext(JSArrayBuffer* obj) { - return obj->weak_next(); - } - - static void VisitLiveObject(Heap* heap, - JSArrayBuffer* array_buffer, - WeakObjectRetainer* retainer, - bool record_slots) { - Object* typed_array_obj = - VisitWeakList<JSArrayBufferView>( - heap, - array_buffer->weak_first_view(), - retainer, record_slots); - array_buffer->set_weak_first_view(typed_array_obj); - if (typed_array_obj != heap->undefined_value() && record_slots) { - Object** slot = HeapObject::RawField( - array_buffer, JSArrayBuffer::kWeakFirstViewOffset); - heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj); - } - } - - static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) { - Runtime::FreeArrayBuffer(heap->isolate(), phantom); - } - - static int WeakNextOffset() { - return JSArrayBuffer::kWeakNextOffset; - } -}; - - void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots) { Object* array_buffer_obj = @@ -1999,29 +1729,6 @@ void Heap::TearDownArrayBuffers() { } -template<> -struct WeakListVisitor<AllocationSite> { - static void SetWeakNext(AllocationSite* obj, Object* next) { - obj->set_weak_next(next); - } - - static Object* WeakNext(AllocationSite* obj) { - return obj->weak_next(); - } - - static void VisitLiveObject(Heap* heap, - AllocationSite* site, - WeakObjectRetainer* retainer, - bool record_slots) {} - - static void VisitPhantomObject(Heap* heap, AllocationSite* phantom) {} - - static int WeakNextOffset() { - return AllocationSite::kWeakNextOffset; - } -}; - - void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer, bool record_slots) { Object* allocation_site_obj = @@ -2338,20 +2045,18 @@ class ScavengingVisitor : public StaticVisitorBase { Heap* heap = map->GetHeap(); if (heap->ShouldBePromoted(object->address(), object_size)) { - MaybeObject* maybe_result; + AllocationResult allocation; if (object_contents == DATA_OBJECT) { ASSERT(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE)); - maybe_result = heap->old_data_space()->AllocateRaw(allocation_size); + allocation = heap->old_data_space()->AllocateRaw(allocation_size); } else { ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE)); - maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size); + allocation = heap->old_pointer_space()->AllocateRaw(allocation_size); } - Object* result = NULL; // Initialization to please compiler. - if (maybe_result->ToObject(&result)) { - HeapObject* target = HeapObject::cast(result); - + HeapObject* target = NULL; // Initialization to please compiler. 
+ if (allocation.To(&target)) { if (alignment != kObjectAlignment) { target = EnsureDoubleAligned(heap, target, allocation_size); } @@ -2376,10 +2081,10 @@ class ScavengingVisitor : public StaticVisitorBase { } } ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE)); - MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size); + AllocationResult allocation = + heap->new_space()->AllocateRaw(allocation_size); heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); - Object* result = allocation->ToObjectUnchecked(); - HeapObject* target = HeapObject::cast(result); + HeapObject* target = HeapObject::cast(allocation.ToObjectChecked()); if (alignment != kObjectAlignment) { target = EnsureDoubleAligned(heap, target, allocation_size); @@ -2616,11 +2321,11 @@ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { } -MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, - int instance_size) { +AllocationResult Heap::AllocatePartialMap(InstanceType instance_type, + int instance_size) { Object* result; - MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); + if (!allocation.To(&result)) return allocation; // Map::cast cannot be used due to uninitialized map field. reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map()); @@ -2640,15 +2345,15 @@ MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, } -MaybeObject* Heap::AllocateMap(InstanceType instance_type, - int instance_size, - ElementsKind elements_kind) { - Object* result; - MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); - if (!maybe_result->To(&result)) return maybe_result; +AllocationResult Heap::AllocateMap(InstanceType instance_type, + int instance_size, + ElementsKind elements_kind) { + HeapObject* result; + AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); + if (!allocation.To(&result)) return allocation; - Map* map = reinterpret_cast<Map*>(result); - map->set_map_no_write_barrier(meta_map()); + result->set_map_no_write_barrier(meta_map()); + Map* map = Map::cast(result); map->set_instance_type(instance_type); map->set_visitor_id( StaticVisitorBase::GetVisitorId(instance_type, instance_size)); @@ -2674,52 +2379,19 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type, } -MaybeObject* Heap::AllocateCodeCache() { - CodeCache* code_cache; - { MaybeObject* maybe_code_cache = AllocateStruct(CODE_CACHE_TYPE); - if (!maybe_code_cache->To(&code_cache)) return maybe_code_cache; - } - code_cache->set_default_cache(empty_fixed_array(), SKIP_WRITE_BARRIER); - code_cache->set_normal_type_cache(undefined_value(), SKIP_WRITE_BARRIER); - return code_cache; -} - - -MaybeObject* Heap::AllocatePolymorphicCodeCache() { - return AllocateStruct(POLYMORPHIC_CODE_CACHE_TYPE); -} - - -MaybeObject* Heap::AllocateAccessorPair() { - AccessorPair* accessors; - { MaybeObject* maybe_accessors = AllocateStruct(ACCESSOR_PAIR_TYPE); - if (!maybe_accessors->To(&accessors)) return maybe_accessors; - } - accessors->set_getter(the_hole_value(), SKIP_WRITE_BARRIER); - accessors->set_setter(the_hole_value(), SKIP_WRITE_BARRIER); - accessors->set_access_flags(Smi::FromInt(0), SKIP_WRITE_BARRIER); - return accessors; -} - - -MaybeObject* Heap::AllocateTypeFeedbackInfo() { - TypeFeedbackInfo* info; - { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE); - if 
(!maybe_info->To(&info)) return maybe_info; +AllocationResult Heap::AllocateFillerObject(int size, + bool double_align, + AllocationSpace space) { + HeapObject* obj; + { AllocationResult allocation = AllocateRaw(size, space, space); + if (!allocation.To(&obj)) return allocation; } - info->initialize_storage(); - info->set_feedback_vector(empty_fixed_array(), SKIP_WRITE_BARRIER); - return info; -} - - -MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) { - AliasedArgumentsEntry* entry; - { MaybeObject* maybe_entry = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE); - if (!maybe_entry->To(&entry)) return maybe_entry; - } - entry->set_aliased_context_slot(aliased_context_slot); - return entry; +#ifdef DEBUG + MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); + ASSERT(chunk->owner()->identity() == space); +#endif + CreateFillerObjectAt(obj->address(), size); + return obj; } @@ -2748,60 +2420,62 @@ const Heap::StructTable Heap::struct_table[] = { bool Heap::CreateInitialMaps() { - Object* obj; - { MaybeObject* maybe_obj = AllocatePartialMap(MAP_TYPE, Map::kSize); - if (!maybe_obj->ToObject(&obj)) return false; + HeapObject* obj; + { AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize); + if (!allocation.To(&obj)) return false; } // Map::cast cannot be used due to uninitialized map field. Map* new_meta_map = reinterpret_cast<Map*>(obj); set_meta_map(new_meta_map); new_meta_map->set_map(new_meta_map); - { MaybeObject* maybe_obj = - AllocatePartialMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_fixed_array_map(Map::cast(obj)); + { // Partial map allocation +#define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \ + { Map* map; \ + if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \ + set_##field_name##_map(map); \ + } - { MaybeObject* maybe_obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_oddball_map(Map::cast(obj)); + ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array); + ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined); + ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null); + ALLOCATE_PARTIAL_MAP(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel, + constant_pool_array); - { MaybeObject* maybe_obj = - AllocatePartialMap(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel); - if (!maybe_obj->ToObject(&obj)) return false; +#undef ALLOCATE_PARTIAL_MAP } - set_constant_pool_array_map(Map::cast(obj)); // Allocate the empty array. 
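The hunks above and below are instances of the change that runs through the whole file: allocation routines now return AllocationResult instead of MaybeObject*, and callers use To() to receive a typed object or bail out on failure. A minimal, self-contained sketch of that calling pattern, using simplified stand-in types rather than V8's real declarations:

#include <cassert>
#include <cstddef>
#include <cstdio>

// Simplified stand-ins; the real AllocationResult/HeapObject live in V8's heap code.
struct HeapObject { int size_in_bytes; };

class AllocationResult {
 public:
  static AllocationResult Retry() { return AllocationResult(NULL); }
  explicit AllocationResult(HeapObject* object) : object_(object) {}
  bool IsRetry() const { return object_ == NULL; }
  // On success, fill the typed out-parameter; on failure, leave it untouched.
  template <typename T>
  bool To(T** out) const {
    if (IsRetry()) return false;
    *out = static_cast<T*>(object_);
    return true;
  }
 private:
  HeapObject* object_;
};

static HeapObject g_slot;  // pretend "space" with room for exactly one object

AllocationResult AllocateRaw(int size_in_bytes) {
  if (size_in_bytes > 64) return AllocationResult::Retry();  // simulated failure
  g_slot.size_in_bytes = size_in_bytes;
  return AllocationResult(&g_slot);
}

int main() {
  HeapObject* target = NULL;  // Initialization to please compiler.
  AllocationResult allocation = AllocateRaw(32);
  if (allocation.To(&target)) {                 // success path, as in the hunks
    std::printf("allocated %d bytes\n", target->size_in_bytes);
  }
  assert(AllocateRaw(128).IsRetry());           // failure is reported, not thrown
  return 0;
}

The shape mirrors the edits in this file: the old ToObject(&result)/HeapObject::cast pair collapses into a single typed To(&target) check.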
- { MaybeObject* maybe_obj = AllocateEmptyFixedArray(); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = AllocateEmptyFixedArray(); + if (!allocation.To(&obj)) return false; } set_empty_fixed_array(FixedArray::cast(obj)); - { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_POINTER_SPACE); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = Allocate(null_map(), OLD_POINTER_SPACE); + if (!allocation.To(&obj)) return false; } set_null_value(Oddball::cast(obj)); Oddball::cast(obj)->set_kind(Oddball::kNull); - { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_POINTER_SPACE); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = Allocate(undefined_map(), OLD_POINTER_SPACE); + if (!allocation.To(&obj)) return false; } set_undefined_value(Oddball::cast(obj)); Oddball::cast(obj)->set_kind(Oddball::kUndefined); ASSERT(!InNewSpace(undefined_value())); + // Set preliminary exception sentinel value before actually initializing it. + set_exception(null_value()); + // Allocate the empty descriptor array. - { MaybeObject* maybe_obj = AllocateEmptyFixedArray(); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = AllocateEmptyFixedArray(); + if (!allocation.To(&obj)) return false; } set_empty_descriptor_array(DescriptorArray::cast(obj)); // Allocate the constant pool array. - { MaybeObject* maybe_obj = AllocateEmptyConstantPoolArray(); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = AllocateEmptyConstantPoolArray(); + if (!allocation.To(&obj)) return false; } set_empty_constant_pool_array(ConstantPoolArray::cast(obj)); @@ -2817,10 +2491,15 @@ bool Heap::CreateInitialMaps() { fixed_array_map()->init_back_pointer(undefined_value()); fixed_array_map()->set_instance_descriptors(empty_descriptor_array()); - oddball_map()->set_code_cache(empty_fixed_array()); - oddball_map()->set_dependent_code(DependentCode::cast(empty_fixed_array())); - oddball_map()->init_back_pointer(undefined_value()); - oddball_map()->set_instance_descriptors(empty_descriptor_array()); + undefined_map()->set_code_cache(empty_fixed_array()); + undefined_map()->set_dependent_code(DependentCode::cast(empty_fixed_array())); + undefined_map()->init_back_pointer(undefined_value()); + undefined_map()->set_instance_descriptors(empty_descriptor_array()); + + null_map()->set_code_cache(empty_fixed_array()); + null_map()->set_dependent_code(DependentCode::cast(empty_fixed_array())); + null_map()->init_back_pointer(undefined_value()); + null_map()->set_instance_descriptors(empty_descriptor_array()); constant_pool_array_map()->set_code_cache(empty_fixed_array()); constant_pool_array_map()->set_dependent_code( @@ -2835,8 +2514,11 @@ bool Heap::CreateInitialMaps() { fixed_array_map()->set_prototype(null_value()); fixed_array_map()->set_constructor(null_value()); - oddball_map()->set_prototype(null_value()); - oddball_map()->set_constructor(null_value()); + undefined_map()->set_prototype(null_value()); + undefined_map()->set_constructor(null_value()); + + null_map()->set_prototype(null_value()); + null_map()->set_constructor(null_value()); constant_pool_array_map()->set_prototype(null_value()); constant_pool_array_map()->set_constructor(null_value()); @@ -2844,7 +2526,7 @@ bool Heap::CreateInitialMaps() { { // Map allocation #define ALLOCATE_MAP(instance_type, size, field_name) \ { Map* map; \ - if (!AllocateMap((instance_type), size)->To(&map)) return false; \ + if 
(!AllocateMap((instance_type), size).To(&map)) return false; \ set_##field_name##_map(map); \ } @@ -2859,12 +2541,24 @@ bool Heap::CreateInitialMaps() { ALLOCATE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol) ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign) + ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, the_hole); + ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, boolean); + ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, uninitialized); + ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, arguments_marker); + ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, no_interceptor_result_sentinel); + ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, exception); + ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, termination_exception); + for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) { const StringTypeTable& entry = string_type_table[i]; - { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = AllocateMap(entry.type, entry.size); + if (!allocation.To(&obj)) return false; } - roots_[entry.index] = Map::cast(obj); + // Mark cons string maps as unstable, because their objects can change + // maps during GC. + Map* map = Map::cast(obj); + if (StringShape(entry.type).IsCons()) map->mark_unstable(); + roots_[entry.index] = map; } ALLOCATE_VARSIZE_MAP(STRING_TYPE, undetectable_string) @@ -2904,12 +2598,13 @@ bool Heap::CreateInitialMaps() { for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) { const StructTable& entry = struct_table[i]; Map* map; - if (!AllocateMap(entry.type, entry.size)->To(&map)) + if (!AllocateMap(entry.type, entry.size).To(&map)) return false; roots_[entry.index] = map; } ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, hash_table) + ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, ordered_hash_table) ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, function_context) ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, catch_context) @@ -2937,13 +2632,13 @@ bool Heap::CreateInitialMaps() { { // Empty arrays { ByteArray* byte_array; - if (!AllocateByteArray(0, TENURED)->To(&byte_array)) return false; + if (!AllocateByteArray(0, TENURED).To(&byte_array)) return false; set_empty_byte_array(byte_array); } #define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size) \ { ExternalArray* obj; \ - if (!AllocateEmptyExternalArray(kExternal##Type##Array)->To(&obj)) \ + if (!AllocateEmptyExternalArray(kExternal##Type##Array).To(&obj)) \ return false; \ set_empty_external_##type##_array(obj); \ } @@ -2953,7 +2648,7 @@ bool Heap::CreateInitialMaps() { #define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \ { FixedTypedArrayBase* obj; \ - if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array)->To(&obj)) \ + if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array).To(&obj)) \ return false; \ set_empty_fixed_##type##_array(obj); \ } @@ -2966,7 +2661,8 @@ bool Heap::CreateInitialMaps() { } -MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { +AllocationResult Heap::AllocateHeapNumber(double value, + PretenureFlag pretenure) { // Statically ensure that it is safe to allocate heap numbers in paged // spaces. 
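A side effect of the macro-driven map setup above is that every oddball (the hole, the booleans, the uninitialized and arguments markers, the sentinels, the exceptions, plus the undefined and null maps allocated earlier) now gets its own map instead of sharing one oddball map, so later code can tell oddballs apart from the map pointer alone. A toy illustration of that idea with stand-in types; the kind tags below are arbitrary, not V8's constants:

#include <cassert>

// One Map instance per oddball kind, so identity checks can be done on the
// map pointer without reading the object's payload.
struct Map { const char* name; };
struct Oddball { const Map* map; int kind; };  // kind values arbitrary here

static const Map kUndefinedMap = { "undefined_map" };
static const Map kNullMap = { "null_map" };
static const Map kTheHoleMap = { "the_hole_map" };

bool IsUndefined(const Oddball& o) { return o.map == &kUndefinedMap; }
bool IsTheHole(const Oddball& o) { return o.map == &kTheHoleMap; }

int main() {
  Oddball undefined_value = { &kUndefinedMap, 0 };
  Oddball null_value = { &kNullMap, 1 };
  Oddball the_hole_value = { &kTheHoleMap, 2 };
  assert(IsUndefined(undefined_value) && !IsUndefined(null_value));
  assert(IsTheHole(the_hole_value) && !IsTheHole(undefined_value));
  return 0;
}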
int size = HeapNumber::kSize; @@ -2974,42 +2670,41 @@ MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } - HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); + result->set_map_no_write_barrier(heap_number_map()); HeapNumber::cast(result)->set_value(value); return result; } -MaybeObject* Heap::AllocateCell(Object* value) { +AllocationResult Heap::AllocateCell(Object* value) { int size = Cell::kSize; STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, CELL_SPACE, CELL_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, CELL_SPACE, CELL_SPACE); + if (!allocation.To(&result)) return allocation; } - HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); + result->set_map_no_write_barrier(cell_map()); Cell::cast(result)->set_value(value); return result; } -MaybeObject* Heap::AllocatePropertyCell() { +AllocationResult Heap::AllocatePropertyCell() { int size = PropertyCell::kSize; STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize); - Object* result; - MaybeObject* maybe_result = + HeapObject* result; + AllocationResult allocation = AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + if (!allocation.To(&result)) return allocation; - HeapObject::cast(result)->set_map_no_write_barrier( - global_property_cell_map()); + result->set_map_no_write_barrier(global_property_cell_map()); PropertyCell* cell = PropertyCell::cast(result); cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), SKIP_WRITE_BARRIER); @@ -3019,78 +2714,36 @@ MaybeObject* Heap::AllocatePropertyCell() { } -MaybeObject* Heap::AllocateBox(Object* value, PretenureFlag pretenure) { - Box* result; - MaybeObject* maybe_result = AllocateStruct(BOX_TYPE); - if (!maybe_result->To(&result)) return maybe_result; - result->set_value(value); - return result; -} - - -MaybeObject* Heap::AllocateAllocationSite() { - AllocationSite* site; - MaybeObject* maybe_result = Allocate(allocation_site_map(), - OLD_POINTER_SPACE); - if (!maybe_result->To(&site)) return maybe_result; - site->Initialize(); - - // Link the site - site->set_weak_next(allocation_sites_list()); - set_allocation_sites_list(site); - return site; -} - - -MaybeObject* Heap::CreateOddball(const char* to_string, - Object* to_number, - byte kind) { - Object* result; - { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_POINTER_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - return Oddball::cast(result)->Initialize(this, to_string, to_number, kind); -} - - -bool Heap::CreateApiObjects() { - Object* obj; +void Heap::CreateApiObjects() { + HandleScope scope(isolate()); + Factory* factory = isolate()->factory(); + Handle<Map> new_neander_map = + factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); - { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); - if (!maybe_obj->ToObject(&obj)) return false; - } // Don't use Smi-only elements 
optimizations for objects with the neander // map. There are too many cases where element values are set directly with a // bottleneck to trap the Smi-only -> fast elements transition, and there // appears to be no benefit for optimize this case. - Map* new_neander_map = Map::cast(obj); new_neander_map->set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND); - set_neander_map(new_neander_map); + set_neander_map(*new_neander_map); - { MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map()); - if (!maybe_obj->ToObject(&obj)) return false; - } - Object* elements; - { MaybeObject* maybe_elements = AllocateFixedArray(2); - if (!maybe_elements->ToObject(&elements)) return false; - } - FixedArray::cast(elements)->set(0, Smi::FromInt(0)); - JSObject::cast(obj)->set_elements(FixedArray::cast(elements)); - set_message_listeners(JSObject::cast(obj)); - - return true; + Handle<JSObject> listeners = factory->NewNeanderObject(); + Handle<FixedArray> elements = factory->NewFixedArray(2); + elements->set(0, Smi::FromInt(0)); + listeners->set_elements(*elements); + set_message_listeners(*listeners); } void Heap::CreateJSEntryStub() { - JSEntryStub stub; - set_js_entry_code(*stub.GetCode(isolate())); + JSEntryStub stub(isolate()); + set_js_entry_code(*stub.GetCode()); } void Heap::CreateJSConstructEntryStub() { - JSConstructEntryStub stub; - set_js_construct_entry_code(*stub.GetCode(isolate())); + JSConstructEntryStub stub(isolate()); + set_js_construct_entry_code(*stub.GetCode()); } @@ -3124,108 +2777,86 @@ void Heap::CreateFixedStubs() { } -bool Heap::CreateInitialObjects() { - Object* obj; +void Heap::CreateInitialObjects() { + HandleScope scope(isolate()); + Factory* factory = isolate()->factory(); // The -0 value must be set before NumberFromDouble works. - { MaybeObject* maybe_obj = AllocateHeapNumber(-0.0, TENURED); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_minus_zero_value(HeapNumber::cast(obj)); + set_minus_zero_value(*factory->NewHeapNumber(-0.0, TENURED)); ASSERT(std::signbit(minus_zero_value()->Number()) != 0); - { MaybeObject* maybe_obj = AllocateHeapNumber(OS::nan_value(), TENURED); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_nan_value(HeapNumber::cast(obj)); - - { MaybeObject* maybe_obj = AllocateHeapNumber(V8_INFINITY, TENURED); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_infinity_value(HeapNumber::cast(obj)); + set_nan_value(*factory->NewHeapNumber(OS::nan_value(), TENURED)); + set_infinity_value(*factory->NewHeapNumber(V8_INFINITY, TENURED)); // The hole has not been created yet, but we want to put something // predictable in the gaps in the string table, so lets make that Smi zero. set_the_hole_value(reinterpret_cast<Oddball*>(Smi::FromInt(0))); // Allocate initial string table. - { MaybeObject* maybe_obj = - StringTable::Allocate(this, kInitialStringTableSize); - if (!maybe_obj->ToObject(&obj)) return false; - } - // Don't use set_string_table() due to asserts. - roots_[kStringTableRootIndex] = obj; + set_string_table(*StringTable::New(isolate(), kInitialStringTableSize)); // Finish initializing oddballs after creating the string table. - { MaybeObject* maybe_obj = - undefined_value()->Initialize(this, - "undefined", - nan_value(), - Oddball::kUndefined); - if (!maybe_obj->ToObject(&obj)) return false; - } + Oddball::Initialize(isolate(), + factory->undefined_value(), + "undefined", + factory->nan_value(), + Oddball::kUndefined); // Initialize the null_value. 
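From CreateApiObjects onward, bootstrap code allocates through the Factory and stores the result of dereferencing a Handle into the root list, as in set_message_listeners(*listeners) above. A compact sketch of that handle/dereference idiom; the Handle and Factory below are simplified stand-ins, not the real V8 classes:

#include <cassert>
#include <cstddef>
#include <deque>

struct FixedArray { int length; };

// A Handle adds one level of indirection that a moving GC could update;
// operator* hands back the raw object, which is what the root setters store.
template <typename T>
class Handle {
 public:
  explicit Handle(T* location) : location_(location) {}
  T* operator*() const { return location_; }
  T* operator->() const { return location_; }
 private:
  T* location_;
};

class Factory {
 public:
  Handle<FixedArray> NewFixedArray(int length) {
    storage_.push_back(FixedArray());
    storage_.back().length = length;
    return Handle<FixedArray>(&storage_.back());
  }
 private:
  std::deque<FixedArray> storage_;  // deque keeps element addresses stable
};

int main() {
  Factory factory;
  FixedArray* root_slot = NULL;  // stands in for an entry in the heap's roots
  // Mirrors the pattern above: set_xxx(*factory->NewFixedArray(n));
  root_slot = *factory.NewFixedArray(2);
  assert(root_slot->length == 2);
  return 0;
}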
- { MaybeObject* maybe_obj = null_value()->Initialize( - this, "null", Smi::FromInt(0), Oddball::kNull); - if (!maybe_obj->ToObject(&obj)) return false; - } - - { MaybeObject* maybe_obj = CreateOddball("true", - Smi::FromInt(1), - Oddball::kTrue); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_true_value(Oddball::cast(obj)); - - { MaybeObject* maybe_obj = CreateOddball("false", - Smi::FromInt(0), - Oddball::kFalse); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_false_value(Oddball::cast(obj)); - - { MaybeObject* maybe_obj = CreateOddball("hole", - Smi::FromInt(-1), - Oddball::kTheHole); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_the_hole_value(Oddball::cast(obj)); - - { MaybeObject* maybe_obj = CreateOddball("uninitialized", - Smi::FromInt(-1), - Oddball::kUninitialized); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_uninitialized_value(Oddball::cast(obj)); - - { MaybeObject* maybe_obj = CreateOddball("arguments_marker", - Smi::FromInt(-4), - Oddball::kArgumentMarker); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_arguments_marker(Oddball::cast(obj)); - - { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel", - Smi::FromInt(-2), - Oddball::kOther); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_no_interceptor_result_sentinel(obj); - - { MaybeObject* maybe_obj = CreateOddball("termination_exception", - Smi::FromInt(-3), - Oddball::kOther); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_termination_exception(obj); + Oddball::Initialize(isolate(), + factory->null_value(), + "null", + handle(Smi::FromInt(0), isolate()), + Oddball::kNull); + + set_true_value(*factory->NewOddball(factory->boolean_map(), + "true", + handle(Smi::FromInt(1), isolate()), + Oddball::kTrue)); + + set_false_value(*factory->NewOddball(factory->boolean_map(), + "false", + handle(Smi::FromInt(0), isolate()), + Oddball::kFalse)); + + set_the_hole_value(*factory->NewOddball(factory->the_hole_map(), + "hole", + handle(Smi::FromInt(-1), isolate()), + Oddball::kTheHole)); + + set_uninitialized_value( + *factory->NewOddball(factory->uninitialized_map(), + "uninitialized", + handle(Smi::FromInt(-1), isolate()), + Oddball::kUninitialized)); + + set_arguments_marker(*factory->NewOddball(factory->arguments_marker_map(), + "arguments_marker", + handle(Smi::FromInt(-4), isolate()), + Oddball::kArgumentMarker)); + + set_no_interceptor_result_sentinel( + *factory->NewOddball(factory->no_interceptor_result_sentinel_map(), + "no_interceptor_result_sentinel", + handle(Smi::FromInt(-2), isolate()), + Oddball::kOther)); + + set_termination_exception( + *factory->NewOddball(factory->termination_exception_map(), + "termination_exception", + handle(Smi::FromInt(-3), isolate()), + Oddball::kOther)); + + set_exception( + *factory->NewOddball(factory->exception_map(), + "exception", + handle(Smi::FromInt(-5), isolate()), + Oddball::kException)); for (unsigned i = 0; i < ARRAY_SIZE(constant_string_table); i++) { - { MaybeObject* maybe_obj = - InternalizeUtf8String(constant_string_table[i].contents); - if (!maybe_obj->ToObject(&obj)) return false; - } - roots_[constant_string_table[i].index] = String::cast(obj); + Handle<String> str = + factory->InternalizeUtf8String(constant_string_table[i].contents); + roots_[constant_string_table[i].index] = *str; } // Allocate the hidden string which is used to identify the hidden properties @@ -3234,31 +2865,19 @@ bool Heap::CreateInitialObjects() { // loop above because it needs to be allocated manually 
with the special // hash code in place. The hash code for the hidden_string is zero to ensure // that it will always be at the first entry in property descriptors. - { MaybeObject* maybe_obj = AllocateOneByteInternalizedString( + hidden_string_ = *factory->NewOneByteInternalizedString( OneByteVector("", 0), String::kEmptyStringHash); - if (!maybe_obj->ToObject(&obj)) return false; - } - hidden_string_ = String::cast(obj); - // Allocate the code_stubs dictionary. The initial size is set to avoid + // Create the code_stubs dictionary. The initial size is set to avoid // expanding the dictionary during bootstrapping. - { MaybeObject* maybe_obj = UnseededNumberDictionary::Allocate(this, 128); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_code_stubs(UnseededNumberDictionary::cast(obj)); - + set_code_stubs(*UnseededNumberDictionary::New(isolate(), 128)); - // Allocate the non_monomorphic_cache used in stub-cache.cc. The initial size + // Create the non_monomorphic_cache used in stub-cache.cc. The initial size // is set to avoid expanding the dictionary during bootstrapping. - { MaybeObject* maybe_obj = UnseededNumberDictionary::Allocate(this, 64); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_non_monomorphic_cache(UnseededNumberDictionary::cast(obj)); + set_non_monomorphic_cache(*UnseededNumberDictionary::New(isolate(), 64)); - { MaybeObject* maybe_obj = AllocatePolymorphicCodeCache(); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_polymorphic_code_cache(PolymorphicCodeCache::cast(obj)); + set_polymorphic_code_cache(PolymorphicCodeCache::cast( + *factory->NewStruct(POLYMORPHIC_CODE_CACHE_TYPE))); set_instanceof_cache_function(Smi::FromInt(0)); set_instanceof_cache_map(Smi::FromInt(0)); @@ -3267,127 +2886,60 @@ bool Heap::CreateInitialObjects() { CreateFixedStubs(); // Allocate the dictionary of intrinsic function names. - { MaybeObject* maybe_obj = - NameDictionary::Allocate(this, Runtime::kNumFunctions); - if (!maybe_obj->ToObject(&obj)) return false; - } - { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(this, - obj); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_intrinsic_function_names(NameDictionary::cast(obj)); + Handle<NameDictionary> intrinsic_names = + NameDictionary::New(isolate(), Runtime::kNumFunctions); + Runtime::InitializeIntrinsicFunctionNames(isolate(), intrinsic_names); + set_intrinsic_function_names(*intrinsic_names); - { MaybeObject* maybe_obj = AllocateInitialNumberStringCache(); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_number_string_cache(FixedArray::cast(obj)); + set_number_string_cache(*factory->NewFixedArray( + kInitialNumberStringCacheSize * 2, TENURED)); // Allocate cache for single character one byte strings. - { MaybeObject* maybe_obj = - AllocateFixedArray(String::kMaxOneByteCharCode + 1, TENURED); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_single_character_string_cache(FixedArray::cast(obj)); + set_single_character_string_cache(*factory->NewFixedArray( + String::kMaxOneByteCharCode + 1, TENURED)); - // Allocate cache for string split. 
- { MaybeObject* maybe_obj = AllocateFixedArray( - RegExpResultsCache::kRegExpResultsCacheSize, TENURED); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_string_split_cache(FixedArray::cast(obj)); - - { MaybeObject* maybe_obj = AllocateFixedArray( - RegExpResultsCache::kRegExpResultsCacheSize, TENURED); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_regexp_multiple_cache(FixedArray::cast(obj)); + // Allocate cache for string split and regexp-multiple. + set_string_split_cache(*factory->NewFixedArray( + RegExpResultsCache::kRegExpResultsCacheSize, TENURED)); + set_regexp_multiple_cache(*factory->NewFixedArray( + RegExpResultsCache::kRegExpResultsCacheSize, TENURED)); // Allocate cache for external strings pointing to native source code. - { MaybeObject* maybe_obj = AllocateFixedArray(Natives::GetBuiltinsCount()); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_natives_source_cache(FixedArray::cast(obj)); + set_natives_source_cache(*factory->NewFixedArray( + Natives::GetBuiltinsCount())); - { MaybeObject* maybe_obj = AllocateCell(undefined_value()); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_undefined_cell(Cell::cast(obj)); + set_undefined_cell(*factory->NewCell(factory->undefined_value())); // The symbol registry is initialized lazily. set_symbol_registry(undefined_value()); // Allocate object to hold object observation state. - { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); - if (!maybe_obj->ToObject(&obj)) return false; - } - { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj)); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_observation_state(JSObject::cast(obj)); + set_observation_state(*factory->NewJSObjectFromMap( + factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize))); // Allocate object to hold object microtask state. 
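Most of these caches are created TENURED, so they are placed directly in an old space instead of the young generation. A rough model of the space-selection step behind those allocations; the threshold constant and enum values here are illustrative, not V8's actual definitions:

#include <cassert>

enum PretenureFlag { NOT_TENURED, TENURED };
enum AllocationSpace { NEW_SPACE, OLD_DATA_SPACE, OLD_POINTER_SPACE };

const int kMaxRegularObjectSize = 64 * 1024;  // illustrative limit only

// Small, non-tenured objects go to new space; everything else goes to the
// requested old space.
AllocationSpace SelectSpace(int size, AllocationSpace old_space,
                            PretenureFlag pretenure) {
  if (size > kMaxRegularObjectSize) return old_space;
  return pretenure == TENURED ? old_space : NEW_SPACE;
}

int main() {
  assert(SelectSpace(128, OLD_POINTER_SPACE, NOT_TENURED) == NEW_SPACE);
  assert(SelectSpace(128, OLD_POINTER_SPACE, TENURED) == OLD_POINTER_SPACE);
  assert(SelectSpace(1 << 20, OLD_DATA_SPACE, NOT_TENURED) == OLD_DATA_SPACE);
  return 0;
}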
- { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); - if (!maybe_obj->ToObject(&obj)) return false; - } - { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj)); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_microtask_state(JSObject::cast(obj)); - - { MaybeObject* maybe_obj = AllocateSymbol(); - if (!maybe_obj->ToObject(&obj)) return false; - } - Symbol::cast(obj)->set_is_private(true); - set_frozen_symbol(Symbol::cast(obj)); - - { MaybeObject* maybe_obj = AllocateSymbol(); - if (!maybe_obj->ToObject(&obj)) return false; - } - Symbol::cast(obj)->set_is_private(true); - set_nonexistent_symbol(Symbol::cast(obj)); - - { MaybeObject* maybe_obj = AllocateSymbol(); - if (!maybe_obj->ToObject(&obj)) return false; - } - Symbol::cast(obj)->set_is_private(true); - set_elements_transition_symbol(Symbol::cast(obj)); + set_microtask_state(*factory->NewJSObjectFromMap( + factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize))); - { MaybeObject* maybe_obj = AllocateSymbol(); - if (!maybe_obj->ToObject(&obj)) return false; - } - Symbol::cast(obj)->set_is_private(true); - set_uninitialized_symbol(Symbol::cast(obj)); + set_frozen_symbol(*factory->NewPrivateSymbol()); + set_nonexistent_symbol(*factory->NewPrivateSymbol()); + set_elements_transition_symbol(*factory->NewPrivateSymbol()); + set_uninitialized_symbol(*factory->NewPrivateSymbol()); + set_megamorphic_symbol(*factory->NewPrivateSymbol()); + set_observed_symbol(*factory->NewPrivateSymbol()); - { MaybeObject* maybe_obj = AllocateSymbol(); - if (!maybe_obj->ToObject(&obj)) return false; - } - Symbol::cast(obj)->set_is_private(true); - set_megamorphic_symbol(Symbol::cast(obj)); + Handle<SeededNumberDictionary> slow_element_dictionary = + SeededNumberDictionary::New(isolate(), 0, TENURED); + slow_element_dictionary->set_requires_slow_elements(); + set_empty_slow_element_dictionary(*slow_element_dictionary); - { MaybeObject* maybe_obj = SeededNumberDictionary::Allocate(this, 0, TENURED); - if (!maybe_obj->ToObject(&obj)) return false; - } - SeededNumberDictionary::cast(obj)->set_requires_slow_elements(); - set_empty_slow_element_dictionary(SeededNumberDictionary::cast(obj)); - - { MaybeObject* maybe_obj = AllocateSymbol(); - if (!maybe_obj->ToObject(&obj)) return false; - } - Symbol::cast(obj)->set_is_private(true); - set_observed_symbol(Symbol::cast(obj)); - - { MaybeObject* maybe_obj = AllocateFixedArray(0, TENURED); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_materialized_objects(FixedArray::cast(obj)); + set_materialized_objects(*factory->NewFixedArray(0, TENURED)); // Handling of script id generation is in Factory::NewScript. set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId)); - { MaybeObject* maybe_obj = AllocateAllocationSitesScratchpad(); - if (!maybe_obj->ToObject(&obj)) return false; - } - set_allocation_sites_scratchpad(FixedArray::cast(obj)); + set_allocation_sites_scratchpad(*factory->NewFixedArray( + kAllocationSiteScratchpadSize, TENURED)); InitializeAllocationSitesScratchpad(); // Initialize keyed lookup cache. @@ -3401,8 +2953,6 @@ bool Heap::CreateInitialObjects() { // Initialize compilation cache. 
isolate_->compilation_cache()->Clear(); - - return true; } @@ -3474,60 +3024,58 @@ Object* RegExpResultsCache::Lookup(Heap* heap, } -void RegExpResultsCache::Enter(Heap* heap, - String* key_string, - Object* key_pattern, - FixedArray* value_array, +void RegExpResultsCache::Enter(Isolate* isolate, + Handle<String> key_string, + Handle<Object> key_pattern, + Handle<FixedArray> value_array, ResultsCacheType type) { - FixedArray* cache; + Factory* factory = isolate->factory(); + Handle<FixedArray> cache; if (!key_string->IsInternalizedString()) return; if (type == STRING_SPLIT_SUBSTRINGS) { ASSERT(key_pattern->IsString()); if (!key_pattern->IsInternalizedString()) return; - cache = heap->string_split_cache(); + cache = factory->string_split_cache(); } else { ASSERT(type == REGEXP_MULTIPLE_INDICES); ASSERT(key_pattern->IsFixedArray()); - cache = heap->regexp_multiple_cache(); + cache = factory->regexp_multiple_cache(); } uint32_t hash = key_string->Hash(); uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) & ~(kArrayEntriesPerCacheEntry - 1)); if (cache->get(index + kStringOffset) == Smi::FromInt(0)) { - cache->set(index + kStringOffset, key_string); - cache->set(index + kPatternOffset, key_pattern); - cache->set(index + kArrayOffset, value_array); + cache->set(index + kStringOffset, *key_string); + cache->set(index + kPatternOffset, *key_pattern); + cache->set(index + kArrayOffset, *value_array); } else { uint32_t index2 = ((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1)); if (cache->get(index2 + kStringOffset) == Smi::FromInt(0)) { - cache->set(index2 + kStringOffset, key_string); - cache->set(index2 + kPatternOffset, key_pattern); - cache->set(index2 + kArrayOffset, value_array); + cache->set(index2 + kStringOffset, *key_string); + cache->set(index2 + kPatternOffset, *key_pattern); + cache->set(index2 + kArrayOffset, *value_array); } else { cache->set(index2 + kStringOffset, Smi::FromInt(0)); cache->set(index2 + kPatternOffset, Smi::FromInt(0)); cache->set(index2 + kArrayOffset, Smi::FromInt(0)); - cache->set(index + kStringOffset, key_string); - cache->set(index + kPatternOffset, key_pattern); - cache->set(index + kArrayOffset, value_array); + cache->set(index + kStringOffset, *key_string); + cache->set(index + kPatternOffset, *key_pattern); + cache->set(index + kArrayOffset, *value_array); } } // If the array is a reasonably short list of substrings, convert it into a // list of internalized strings. if (type == STRING_SPLIT_SUBSTRINGS && value_array->length() < 100) { for (int i = 0; i < value_array->length(); i++) { - String* str = String::cast(value_array->get(i)); - Object* internalized_str; - MaybeObject* maybe_string = heap->InternalizeString(str); - if (maybe_string->ToObject(&internalized_str)) { - value_array->set(i, internalized_str); - } + Handle<String> str(String::cast(value_array->get(i)), isolate); + Handle<String> internalized_str = factory->InternalizeString(str); + value_array->set(i, *internalized_str); } } // Convert backing store to a copy-on-write array. 
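Aside from taking handles, RegExpResultsCache::Enter keeps its two-probe layout: the primary index is derived from the string hash and aligned to an entry boundary, and on collision a single secondary slot one entry further on is tried before the primary slot is overwritten. A small stand-alone model of that indexing; the constants mirror the names above but their values here are only illustrative:

#include <cassert>
#include <cstdint>

const uint32_t kRegExpResultsCacheSize = 0x100;  // power of two
const uint32_t kArrayEntriesPerCacheEntry = 4;   // slots per logical entry

uint32_t PrimaryIndex(uint32_t hash) {
  // Same expression as in Enter(): mask into the cache, align to an entry.
  return (hash & (kRegExpResultsCacheSize - 1)) &
         ~(kArrayEntriesPerCacheEntry - 1);
}

uint32_t SecondaryIndex(uint32_t primary) {
  return (primary + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1);
}

int main() {
  uint32_t primary = PrimaryIndex(0xDEADBEEF);
  assert(primary % kArrayEntriesPerCacheEntry == 0);   // entry-aligned
  assert(primary < kRegExpResultsCacheSize);
  assert(SecondaryIndex(primary) < kRegExpResultsCacheSize);
  return 0;
}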
- value_array->set_map_no_write_barrier(heap->fixed_cow_array_map()); + value_array->set_map_no_write_barrier(*factory->fixed_cow_array_map()); } @@ -3538,13 +3086,6 @@ void RegExpResultsCache::Clear(FixedArray* cache) { } -MaybeObject* Heap::AllocateInitialNumberStringCache() { - MaybeObject* maybe_obj = - AllocateFixedArray(kInitialNumberStringCacheSize * 2, TENURED); - return maybe_obj; -} - - int Heap::FullSizeNumberStringCacheLength() { // Compute the size of the number string cache based on the max newspace size. // The number string cache has a minimum size based on twice the initial cache @@ -3558,24 +3099,6 @@ int Heap::FullSizeNumberStringCacheLength() { } -void Heap::AllocateFullSizeNumberStringCache() { - // The idea is to have a small number string cache in the snapshot to keep - // boot-time memory usage down. If we expand the number string cache already - // while creating the snapshot then that didn't work out. - ASSERT(!Serializer::enabled() || FLAG_extra_code != NULL); - MaybeObject* maybe_obj = - AllocateFixedArray(FullSizeNumberStringCacheLength(), TENURED); - Object* new_cache; - if (maybe_obj->ToObject(&new_cache)) { - // We don't bother to repopulate the cache with entries from the old cache. - // It will be repopulated soon enough with new strings. - set_number_string_cache(FixedArray::cast(new_cache)); - } - // If allocation fails then we just return without doing anything. It is only - // a cache, so best effort is OK here. -} - - void Heap::FlushNumberStringCache() { // Flush the number to string cache. int len = number_string_cache()->length(); @@ -3585,107 +3108,6 @@ void Heap::FlushNumberStringCache() { } -static inline int double_get_hash(double d) { - DoubleRepresentation rep(d); - return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32); -} - - -static inline int smi_get_hash(Smi* smi) { - return smi->value(); -} - - -Object* Heap::GetNumberStringCache(Object* number) { - int hash; - int mask = (number_string_cache()->length() >> 1) - 1; - if (number->IsSmi()) { - hash = smi_get_hash(Smi::cast(number)) & mask; - } else { - hash = double_get_hash(number->Number()) & mask; - } - Object* key = number_string_cache()->get(hash * 2); - if (key == number) { - return String::cast(number_string_cache()->get(hash * 2 + 1)); - } else if (key->IsHeapNumber() && - number->IsHeapNumber() && - key->Number() == number->Number()) { - return String::cast(number_string_cache()->get(hash * 2 + 1)); - } - return undefined_value(); -} - - -void Heap::SetNumberStringCache(Object* number, String* string) { - int hash; - int mask = (number_string_cache()->length() >> 1) - 1; - if (number->IsSmi()) { - hash = smi_get_hash(Smi::cast(number)) & mask; - } else { - hash = double_get_hash(number->Number()) & mask; - } - if (number_string_cache()->get(hash * 2) != undefined_value() && - number_string_cache()->length() != FullSizeNumberStringCacheLength()) { - // The first time we have a hash collision, we move to the full sized - // number string cache. 
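The deleted number-string cache helpers hash a Smi by its value and a double by folding the two 32-bit halves of its bit pattern before masking into the cache. A stand-alone version of that hash; memcpy stands in for the DoubleRepresentation bit view used by the removed code, and the cache length is illustrative:

#include <cassert>
#include <cstdint>
#include <cstring>

// Fold the 64-bit IEEE-754 representation into an int, as double_get_hash did.
int DoubleGetHash(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  return static_cast<int>(bits) ^ static_cast<int>(bits >> 32);
}

int SmiGetHash(int value) { return value; }  // a Smi hashes to its own value

int main() {
  const int cache_length = 256;              // illustrative cache size
  int mask = (cache_length >> 1) - 1;        // same masking as the deleted code
  int index = DoubleGetHash(3.25) & mask;
  assert(index >= 0 && index <= mask);
  assert(DoubleGetHash(3.25) == DoubleGetHash(3.25));
  assert((SmiGetHash(42) & mask) == 42);
  return 0;
}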
- AllocateFullSizeNumberStringCache(); - return; - } - number_string_cache()->set(hash * 2, number); - number_string_cache()->set(hash * 2 + 1, string); -} - - -MaybeObject* Heap::NumberToString(Object* number, - bool check_number_string_cache) { - isolate_->counters()->number_to_string_runtime()->Increment(); - if (check_number_string_cache) { - Object* cached = GetNumberStringCache(number); - if (cached != undefined_value()) { - return cached; - } - } - - char arr[100]; - Vector<char> buffer(arr, ARRAY_SIZE(arr)); - const char* str; - if (number->IsSmi()) { - int num = Smi::cast(number)->value(); - str = IntToCString(num, buffer); - } else { - double num = HeapNumber::cast(number)->value(); - str = DoubleToCString(num, buffer); - } - - Object* js_string; - - // We tenure the allocated string since it is referenced from the - // number-string cache which lives in the old space. - MaybeObject* maybe_js_string = - AllocateStringFromOneByte(CStrVector(str), TENURED); - if (maybe_js_string->ToObject(&js_string)) { - SetNumberStringCache(number, String::cast(js_string)); - } - return maybe_js_string; -} - - -MaybeObject* Heap::Uint32ToString(uint32_t value, - bool check_number_string_cache) { - Object* number; - MaybeObject* maybe = NumberFromUint32(value); - if (!maybe->To<Object>(&number)) return maybe; - return NumberToString(number, check_number_string_cache); -} - - -MaybeObject* Heap::AllocateAllocationSitesScratchpad() { - MaybeObject* maybe_obj = - AllocateFixedArray(kAllocationSiteScratchpadSize, TENURED); - return maybe_obj; -} - - void Heap::FlushAllocationSitesScratchpad() { for (int i = 0; i < allocation_sites_scratchpad_length_; i++) { allocation_sites_scratchpad()->set_undefined(i); @@ -3816,188 +3238,32 @@ FixedTypedArrayBase* Heap::EmptyFixedTypedArrayForMap(Map* map) { } -MaybeObject* Heap::NumberFromDouble(double value, PretenureFlag pretenure) { - // We need to distinguish the minus zero value and this cannot be - // done after conversion to int. Doing this by comparing bit - // patterns is faster than using fpclassify() et al. - if (IsMinusZero(value)) { - return AllocateHeapNumber(-0.0, pretenure); - } - - int int_value = FastD2I(value); - if (value == int_value && Smi::IsValid(int_value)) { - return Smi::FromInt(int_value); - } - - // Materialize the value in the heap. - return AllocateHeapNumber(value, pretenure); -} - - -MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) { +AllocationResult Heap::AllocateForeign(Address address, + PretenureFlag pretenure) { // Statically ensure that it is safe to allocate foreigns in paged spaces. STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize); AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; Foreign* result; - MaybeObject* maybe_result = Allocate(foreign_map(), space); - if (!maybe_result->To(&result)) return maybe_result; + AllocationResult allocation = Allocate(foreign_map(), space); + if (!allocation.To(&result)) return allocation; result->set_foreign_address(address); return result; } -MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) { - SharedFunctionInfo* share; - MaybeObject* maybe = Allocate(shared_function_info_map(), OLD_POINTER_SPACE); - if (!maybe->To<SharedFunctionInfo>(&share)) return maybe; - - // Set pointer fields. 
- share->set_name(name); - Code* illegal = isolate_->builtins()->builtin(Builtins::kIllegal); - share->set_code(illegal); - share->set_optimized_code_map(Smi::FromInt(0)); - share->set_scope_info(ScopeInfo::Empty(isolate_)); - Code* construct_stub = - isolate_->builtins()->builtin(Builtins::kJSConstructStubGeneric); - share->set_construct_stub(construct_stub); - share->set_instance_class_name(Object_string()); - share->set_function_data(undefined_value(), SKIP_WRITE_BARRIER); - share->set_script(undefined_value(), SKIP_WRITE_BARRIER); - share->set_debug_info(undefined_value(), SKIP_WRITE_BARRIER); - share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER); - share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER); - share->set_ast_node_count(0); - share->set_counters(0); - - // Set integer fields (smi or int, depending on the architecture). - share->set_length(0); - share->set_formal_parameter_count(0); - share->set_expected_nof_properties(0); - share->set_num_literals(0); - share->set_start_position_and_type(0); - share->set_end_position(0); - share->set_function_token_position(0); - // All compiler hints default to false or 0. - share->set_compiler_hints(0); - share->set_opt_count_and_bailout_reason(0); - - return share; -} - - -MaybeObject* Heap::AllocateJSMessageObject(String* type, - JSArray* arguments, - int start_position, - int end_position, - Object* script, - Object* stack_frames) { - Object* result; - { MaybeObject* maybe_result = Allocate(message_object_map(), NEW_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - JSMessageObject* message = JSMessageObject::cast(result); - message->set_properties(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER); - message->initialize_elements(); - message->set_elements(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER); - message->set_type(type); - message->set_arguments(arguments); - message->set_start_position(start_position); - message->set_end_position(end_position); - message->set_script(script); - message->set_stack_frames(stack_frames); - return result; -} - - -MaybeObject* Heap::AllocateExternalStringFromAscii( - const ExternalAsciiString::Resource* resource) { - size_t length = resource->length(); - if (length > static_cast<size_t>(String::kMaxLength)) { - return isolate()->ThrowInvalidStringLength(); - } - - Map* map = external_ascii_string_map(); - Object* result; - { MaybeObject* maybe_result = Allocate(map, NEW_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - - ExternalAsciiString* external_string = ExternalAsciiString::cast(result); - external_string->set_length(static_cast<int>(length)); - external_string->set_hash_field(String::kEmptyHashField); - external_string->set_resource(resource); - - return result; -} - - -MaybeObject* Heap::AllocateExternalStringFromTwoByte( - const ExternalTwoByteString::Resource* resource) { - size_t length = resource->length(); - if (length > static_cast<size_t>(String::kMaxLength)) { - return isolate()->ThrowInvalidStringLength(); - } - - // For small strings we check whether the resource contains only - // one byte characters. If yes, we use a different string map. - static const size_t kOneByteCheckLengthLimit = 32; - bool is_one_byte = length <= kOneByteCheckLengthLimit && - String::IsOneByte(resource->data(), static_cast<int>(length)); - Map* map = is_one_byte ? 
-      external_string_with_one_byte_data_map() : external_string_map();
-  Object* result;
-  { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
-
-  ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result);
-  external_string->set_length(static_cast<int>(length));
-  external_string->set_hash_field(String::kEmptyHashField);
-  external_string->set_resource(resource);
-
-  return result;
-}
-
-
-MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
-  if (code <= String::kMaxOneByteCharCode) {
-    Object* value = single_character_string_cache()->get(code);
-    if (value != undefined_value()) return value;
-
-    uint8_t buffer[1];
-    buffer[0] = static_cast<uint8_t>(code);
-    Object* result;
-    OneByteStringKey key(Vector<const uint8_t>(buffer, 1), HashSeed());
-    MaybeObject* maybe_result = InternalizeStringWithKey(&key);
-
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-    single_character_string_cache()->set(code, result);
-    return result;
-  }
-
-  SeqTwoByteString* result;
-  { MaybeObject* maybe_result = AllocateRawTwoByteString(1);
-    if (!maybe_result->To<SeqTwoByteString>(&result)) return maybe_result;
-  }
-  result->SeqTwoByteStringSet(0, code);
-  return result;
-}
-
-
-MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
+AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
   if (length < 0 || length > ByteArray::kMaxLength) {
     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
   }
   int size = ByteArray::SizeFor(length);
   AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
-  Object* result;
-  { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
+  HeapObject* result;
+  { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+    if (!allocation.To(&result)) return allocation;
   }
-  reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
-      byte_array_map());
-  reinterpret_cast<ByteArray*>(result)->set_length(length);
+  result->set_map_no_write_barrier(byte_array_map());
+  ByteArray::cast(result)->set_length(length);
   return result;
 }
@@ -4023,11 +3289,18 @@ bool Heap::CanMoveObjectStart(HeapObject* object) {

   if (lo_space()->Contains(object)) return false;

-  // We cannot move the object start if the given old space page is
-  // concurrently swept.
+  Page* page = Page::FromAddress(address);
+  // We can move the object start if:
+  // (1) the object is not in old pointer or old data space,
+  // (2) the page of the object was already swept,
+  // (3) the page was already concurrently swept. This case is an optimization
+  // for concurrent sweeping. The WasSwept predicate for concurrently swept
+  // pages is set after sweeping all pages.
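The return expression just below encodes the three conditions listed in the comment above. Restated as a stand-alone predicate over booleans (stand-in parameters, not the real Page/MemoryChunk API):

#include <cassert>

bool CanMoveObjectStart(bool in_old_pointer_space, bool in_old_data_space,
                        bool page_was_swept, bool sweeper_threads_active,
                        bool page_sweeping_finalized) {
  return (!in_old_pointer_space && !in_old_data_space) ||      // condition (1)
         page_was_swept ||                                     // condition (2)
         (sweeper_threads_active && page_sweeping_finalized);  // condition (3)
}

int main() {
  // New-space object: always movable.
  assert(CanMoveObjectStart(false, false, false, false, false));
  // Old-space object, page not yet swept, no concurrent sweepers: not movable.
  assert(!CanMoveObjectStart(true, false, false, false, false));
  // Old-space object whose page was already swept: movable.
  assert(CanMoveObjectStart(true, false, true, false, false));
  return 0;
}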
return (!is_in_old_pointer_space && !is_in_old_data_space) || - Page::FromAddress(address)->parallel_sweeping() <= - MemoryChunk::PARALLEL_SWEEPING_FINALIZE; + page->WasSwept() || + (mark_compact_collector()->AreSweeperThreadsActivated() && + page->parallel_sweeping() <= + MemoryChunk::PARALLEL_SWEEPING_FINALIZE); } @@ -4043,23 +3316,21 @@ void Heap::AdjustLiveBytes(Address address, int by, InvocationMode mode) { } -MaybeObject* Heap::AllocateExternalArray(int length, +AllocationResult Heap::AllocateExternalArray(int length, ExternalArrayType array_type, void* external_pointer, PretenureFlag pretenure) { int size = ExternalArray::kAlignedSize; AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } - reinterpret_cast<ExternalArray*>(result)->set_map_no_write_barrier( + result->set_map_no_write_barrier( MapForExternalArrayType(array_type)); - reinterpret_cast<ExternalArray*>(result)->set_length(length); - reinterpret_cast<ExternalArray*>(result)->set_external_pointer( - external_pointer); - + ExternalArray::cast(result)->set_length(length); + ExternalArray::cast(result)->set_external_pointer(external_pointer); return result; } @@ -4084,9 +3355,9 @@ static void ForFixedTypedArray(ExternalArrayType array_type, } -MaybeObject* Heap::AllocateFixedTypedArray(int length, - ExternalArrayType array_type, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateFixedTypedArray(int length, + ExternalArrayType array_type, + PretenureFlag pretenure) { int element_size; ElementsKind elements_kind; ForFixedTypedArray(array_type, &element_size, &elements_kind); @@ -4100,132 +3371,65 @@ MaybeObject* Heap::AllocateFixedTypedArray(int length, AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); HeapObject* object; - MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_object->To(&object)) return maybe_object; + AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&object)) return allocation; if (array_type == kExternalFloat64Array) { object = EnsureDoubleAligned(this, object, size); } - FixedTypedArrayBase* elements = - reinterpret_cast<FixedTypedArrayBase*>(object); - elements->set_map(MapForFixedTypedArray(array_type)); + object->set_map(MapForFixedTypedArray(array_type)); + FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object); elements->set_length(length); memset(elements->DataPtr(), 0, elements->DataSize()); return elements; } -MaybeObject* Heap::CreateCode(const CodeDesc& desc, - Code::Flags flags, - Handle<Object> self_reference, - bool immovable, - bool crankshafted, - int prologue_offset) { - // Allocate ByteArray and ConstantPoolArray before the Code object, so that we - // do not risk leaving uninitialized Code object (and breaking the heap). 
- ByteArray* reloc_info; - MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED); - if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info; - - ConstantPoolArray* constant_pool; - if (FLAG_enable_ool_constant_pool) { - MaybeObject* maybe_constant_pool = desc.origin->AllocateConstantPool(this); - if (!maybe_constant_pool->To(&constant_pool)) return maybe_constant_pool; - } else { - constant_pool = empty_constant_pool_array(); - } - - // Compute size. - int body_size = RoundUp(desc.instr_size, kObjectAlignment); - int obj_size = Code::SizeFor(body_size); - ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment)); - MaybeObject* maybe_result; +AllocationResult Heap::AllocateCode(int object_size, + bool immovable) { + ASSERT(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment)); + AllocationResult allocation; // Large code objects and code objects which should stay at a fixed address // are allocated in large object space. HeapObject* result; - bool force_lo_space = obj_size > code_space()->AreaSize(); + bool force_lo_space = object_size > code_space()->AreaSize(); if (force_lo_space) { - maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); + allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); } else { - maybe_result = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE); + allocation = AllocateRaw(object_size, CODE_SPACE, CODE_SPACE); } - if (!maybe_result->To<HeapObject>(&result)) return maybe_result; + if (!allocation.To(&result)) return allocation; if (immovable && !force_lo_space && - // Objects on the first page of each space are never moved. - !code_space_->FirstPage()->Contains(result->address())) { + // Objects on the first page of each space are never moved. + !code_space_->FirstPage()->Contains(result->address())) { // Discard the first code allocation, which was on a page where it could be // moved. 
- CreateFillerObjectAt(result->address(), obj_size); - maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); - if (!maybe_result->To<HeapObject>(&result)) return maybe_result; + CreateFillerObjectAt(result->address(), object_size); + allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); + if (!allocation.To(&result)) return allocation; } - // Initialize the object result->set_map_no_write_barrier(code_map()); Code* code = Code::cast(result); ASSERT(!isolate_->code_range()->exists() || isolate_->code_range()->contains(code->address())); - code->set_instruction_size(desc.instr_size); - code->set_relocation_info(reloc_info); - code->set_flags(flags); - code->set_raw_kind_specific_flags1(0); - code->set_raw_kind_specific_flags2(0); - code->set_is_crankshafted(crankshafted); - code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER); - code->set_raw_type_feedback_info(undefined_value()); - code->set_next_code_link(undefined_value()); - code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER); code->set_gc_metadata(Smi::FromInt(0)); code->set_ic_age(global_ic_age_); - code->set_prologue_offset(prologue_offset); - if (code->kind() == Code::OPTIMIZED_FUNCTION) { - code->set_marked_for_deoptimization(false); - } - - if (FLAG_enable_ool_constant_pool) { - desc.origin->PopulateConstantPool(constant_pool); - } - code->set_constant_pool(constant_pool); - -#ifdef ENABLE_DEBUGGER_SUPPORT - if (code->kind() == Code::FUNCTION) { - code->set_has_debug_break_slots( - isolate_->debugger()->IsDebuggerActive()); - } -#endif - - // Allow self references to created code object by patching the handle to - // point to the newly allocated Code object. - if (!self_reference.is_null()) { - *(self_reference.location()) = code; - } - // Migrate generated code. - // The generated code can contain Object** values (typically from handles) - // that are dereferenced during the copy to point directly to the actual heap - // objects. These pointers can include references to the code object itself, - // through the self_reference parameter. - code->CopyFrom(desc); - -#ifdef VERIFY_HEAP - if (FLAG_verify_heap) { - code->Verify(); - } -#endif return code; } -MaybeObject* Heap::CopyCode(Code* code) { - MaybeObject* maybe_result; - Object* new_constant_pool; +AllocationResult Heap::CopyCode(Code* code) { + AllocationResult allocation; + HeapObject* new_constant_pool; if (FLAG_enable_ool_constant_pool && code->constant_pool() != empty_constant_pool_array()) { // Copy the constant pool, since edits to the copied code may modify // the constant pool. - maybe_result = CopyConstantPoolArray(code->constant_pool()); - if (!maybe_result->ToObject(&new_constant_pool)) return maybe_result; + allocation = CopyConstantPoolArray(code->constant_pool()); + if (!allocation.To(&new_constant_pool)) return allocation; } else { new_constant_pool = empty_constant_pool_array(); } @@ -4233,17 +3437,17 @@ MaybeObject* Heap::CopyCode(Code* code) { // Allocate an object the same size as the code object. int obj_size = code->Size(); if (obj_size > code_space()->AreaSize()) { - maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); + allocation = lo_space_->AllocateRaw(obj_size, EXECUTABLE); } else { - maybe_result = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE); + allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE); } - Object* result; - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + if (!allocation.To(&result)) return allocation; // Copy code object. 
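The placement logic in AllocateCode above boils down to: oversized code goes straight to large-object space, and immovable code that landed anywhere but the first (never-moved) page of code space is discarded, overwritten with a filler, and re-allocated in large-object space. A stand-in sketch of that decision, not the real allocator:

#include <cassert>

enum Placement { CODE_SPACE_FIRST_PAGE, CODE_SPACE_OTHER_PAGE, LARGE_OBJECT_SPACE };

Placement PlaceCode(int object_size, int code_space_area_size, bool immovable,
                    Placement initial_placement) {
  if (object_size > code_space_area_size) return LARGE_OBJECT_SPACE;
  if (immovable && initial_placement != CODE_SPACE_FIRST_PAGE) {
    // The regular allocation could be moved by the collector; the real code
    // writes a filler over it and retries in large-object space.
    return LARGE_OBJECT_SPACE;
  }
  return initial_placement;
}

int main() {
  const int kAreaSize = 64 << 10;  // illustrative code-space page area
  assert(PlaceCode(1 << 20, kAreaSize, false, CODE_SPACE_OTHER_PAGE) ==
         LARGE_OBJECT_SPACE);                     // too big for code space
  assert(PlaceCode(4 << 10, kAreaSize, true, CODE_SPACE_OTHER_PAGE) ==
         LARGE_OBJECT_SPACE);                     // immovable, page could move
  assert(PlaceCode(4 << 10, kAreaSize, true, CODE_SPACE_FIRST_PAGE) ==
         CODE_SPACE_FIRST_PAGE);                  // first page never moves
  return 0;
}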
Address old_addr = code->address(); - Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); + Address new_addr = result->address(); CopyBlock(new_addr, old_addr, obj_size); Code* new_code = Code::cast(result); @@ -4258,25 +3462,22 @@ MaybeObject* Heap::CopyCode(Code* code) { } -MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) { +AllocationResult Heap::CopyCode(Code* code, Vector<byte> reloc_info) { // Allocate ByteArray and ConstantPoolArray before the Code object, so that we // do not risk leaving uninitialized Code object (and breaking the heap). - Object* reloc_info_array; - { MaybeObject* maybe_reloc_info_array = + ByteArray* reloc_info_array; + { AllocationResult allocation = AllocateByteArray(reloc_info.length(), TENURED); - if (!maybe_reloc_info_array->ToObject(&reloc_info_array)) { - return maybe_reloc_info_array; - } + if (!allocation.To(&reloc_info_array)) return allocation; } - Object* new_constant_pool; + HeapObject* new_constant_pool; if (FLAG_enable_ool_constant_pool && code->constant_pool() != empty_constant_pool_array()) { // Copy the constant pool, since edits to the copied code may modify // the constant pool. - MaybeObject* maybe_constant_pool = + AllocationResult allocation = CopyConstantPoolArray(code->constant_pool()); - if (!maybe_constant_pool->ToObject(&new_constant_pool)) - return maybe_constant_pool; + if (!allocation.To(&new_constant_pool)) return allocation; } else { new_constant_pool = empty_constant_pool_array(); } @@ -4290,24 +3491,24 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) { size_t relocation_offset = static_cast<size_t>(code->instruction_end() - old_addr); - MaybeObject* maybe_result; + AllocationResult allocation; if (new_obj_size > code_space()->AreaSize()) { - maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE); + allocation = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE); } else { - maybe_result = AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE); + allocation = AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE); } - Object* result; - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + if (!allocation.To(&result)) return allocation; // Copy code object. - Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); + Address new_addr = result->address(); // Copy header and instructions. CopyBytes(new_addr, old_addr, relocation_offset); Code* new_code = Code::cast(result); - new_code->set_relocation_info(ByteArray::cast(reloc_info_array)); + new_code->set_relocation_info(reloc_info_array); // Update constant pool. 
new_code->set_constant_pool(new_constant_pool); @@ -4323,9 +3524,7 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) { new_code->Relocate(new_addr - old_addr); #ifdef VERIFY_HEAP - if (FLAG_verify_heap) { - code->Verify(); - } + if (FLAG_verify_heap) code->ObjectVerify(); #endif return new_code; } @@ -4342,7 +3541,7 @@ void Heap::InitializeAllocationMemento(AllocationMemento* memento, } -MaybeObject* Heap::Allocate(Map* map, AllocationSpace space, +AllocationResult Heap::Allocate(Map* map, AllocationSpace space, AllocationSite* allocation_site) { ASSERT(gc_state_ == NOT_IN_GC); ASSERT(map->instance_type() != MAP_TYPE); @@ -4354,11 +3553,11 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space, if (allocation_site != NULL) { size += AllocationMemento::kSize; } - Object* result; - MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + AllocationResult allocation = AllocateRaw(size, space, retry_space); + if (!allocation.To(&result)) return allocation; // No need for write barrier since object is white and map is in old space. - HeapObject::cast(result)->set_map_no_write_barrier(map); + result->set_map_no_write_barrier(map); if (allocation_site != NULL) { AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( reinterpret_cast<Address>(result) + map->instance_size()); @@ -4368,37 +3567,7 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space, } -void Heap::InitializeFunction(JSFunction* function, - SharedFunctionInfo* shared, - Object* prototype) { - ASSERT(!prototype->IsMap()); - function->initialize_properties(); - function->initialize_elements(); - function->set_shared(shared); - function->set_code(shared->code()); - function->set_prototype_or_initial_map(prototype); - function->set_context(undefined_value()); - function->set_literals_or_bindings(empty_fixed_array()); - function->set_next_function_link(undefined_value()); -} - - -MaybeObject* Heap::AllocateFunction(Map* function_map, - SharedFunctionInfo* shared, - Object* prototype, - PretenureFlag pretenure) { - AllocationSpace space = - (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; - Object* result; - { MaybeObject* maybe_result = Allocate(function_map, space); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - InitializeFunction(JSFunction::cast(result), shared, prototype); - return result; -} - - -MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { +AllocationResult Heap::AllocateArgumentsObject(Object* callee, int length) { // To get fast allocation and map sharing for arguments objects we // allocate them based on an arguments boilerplate. @@ -4422,34 +3591,31 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { ASSERT(arguments_object_size == boilerplate->map()->instance_size()); // Do the allocation. - Object* result; - { MaybeObject* maybe_result = + HeapObject* result; + { AllocationResult allocation = AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + if (!allocation.To(&result)) return allocation; } // Copy the content. The arguments boilerplate doesn't have any // fields that point to new space so it's safe to skip the write // barrier here. 
- CopyBlock(HeapObject::cast(result)->address(), - boilerplate->address(), - JSObject::kHeaderSize); + CopyBlock(result->address(), boilerplate->address(), JSObject::kHeaderSize); // Set the length property. - JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsLengthIndex, - Smi::FromInt(length), - SKIP_WRITE_BARRIER); + JSObject* js_obj = JSObject::cast(result); + js_obj->InObjectPropertyAtPut( + kArgumentsLengthIndex, Smi::FromInt(length), SKIP_WRITE_BARRIER); // Set the callee property for sloppy mode arguments object only. if (!strict_mode_callee) { - JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsCalleeIndex, - callee); + js_obj->InObjectPropertyAtPut(kArgumentsCalleeIndex, callee); } // Check the state of the object - ASSERT(JSObject::cast(result)->HasFastProperties()); - ASSERT(JSObject::cast(result)->HasFastObjectElements()); + ASSERT(js_obj->HasFastProperties()); + ASSERT(js_obj->HasFastObjectElements()); - return result; + return js_obj; } @@ -4485,7 +3651,7 @@ void Heap::InitializeJSObjectFromMap(JSObject* obj, } -MaybeObject* Heap::AllocateJSObjectFromMap( +AllocationResult Heap::AllocateJSObjectFromMap( Map* map, PretenureFlag pretenure, bool allocate_properties, @@ -4504,8 +3670,8 @@ MaybeObject* Heap::AllocateJSObjectFromMap( if (allocate_properties) { int prop_size = map->InitialPropertiesLength(); ASSERT(prop_size >= 0); - { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); - if (!maybe_properties->To(&properties)) return maybe_properties; + { AllocationResult allocation = AllocateFixedArray(prop_size, pretenure); + if (!allocation.To(&properties)) return allocation; } } else { properties = empty_fixed_array(); @@ -4514,202 +3680,37 @@ MaybeObject* Heap::AllocateJSObjectFromMap( // Allocate the JSObject. int size = map->instance_size(); AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); - Object* obj; - MaybeObject* maybe_obj = Allocate(map, space, allocation_site); - if (!maybe_obj->To(&obj)) return maybe_obj; + JSObject* js_obj; + AllocationResult allocation = Allocate(map, space, allocation_site); + if (!allocation.To(&js_obj)) return allocation; // Initialize the JSObject. - InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); - ASSERT(JSObject::cast(obj)->HasFastElements() || - JSObject::cast(obj)->HasExternalArrayElements() || - JSObject::cast(obj)->HasFixedTypedArrayElements()); - return obj; + InitializeJSObjectFromMap(js_obj, properties, map); + ASSERT(js_obj->HasFastElements() || + js_obj->HasExternalArrayElements() || + js_obj->HasFixedTypedArrayElements()); + return js_obj; } -MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, - PretenureFlag pretenure, - AllocationSite* allocation_site) { +AllocationResult Heap::AllocateJSObject(JSFunction* constructor, + PretenureFlag pretenure, + AllocationSite* allocation_site) { ASSERT(constructor->has_initial_map()); // Allocate the object based on the constructors initial map. - MaybeObject* result = AllocateJSObjectFromMap(constructor->initial_map(), - pretenure, - true, - allocation_site); + AllocationResult allocation = AllocateJSObjectFromMap( + constructor->initial_map(), pretenure, true, allocation_site); #ifdef DEBUG // Make sure result is NOT a global object if valid. 
- Object* non_failure; - ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); + HeapObject* obj; + ASSERT(!allocation.To(&obj) || !obj->IsGlobalObject()); #endif - return result; -} - - -MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) { - // Allocate a fresh map. Modules do not have a prototype. - Map* map; - MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize); - if (!maybe_map->To(&map)) return maybe_map; - // Allocate the object based on the map. - JSModule* module; - MaybeObject* maybe_module = AllocateJSObjectFromMap(map, TENURED); - if (!maybe_module->To(&module)) return maybe_module; - module->set_context(context); - module->set_scope_info(scope_info); - return module; -} - - -MaybeObject* Heap::AllocateJSArrayAndStorage( - ElementsKind elements_kind, - int length, - int capacity, - ArrayStorageAllocationMode mode, - PretenureFlag pretenure) { - MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure); - JSArray* array; - if (!maybe_array->To(&array)) return maybe_array; - - // TODO(mvstanton): this body of code is duplicate with AllocateJSArrayStorage - // for performance reasons. - ASSERT(capacity >= length); - - if (capacity == 0) { - array->set_length(Smi::FromInt(0)); - array->set_elements(empty_fixed_array()); - return array; - } - - FixedArrayBase* elms; - MaybeObject* maybe_elms = NULL; - if (IsFastDoubleElementsKind(elements_kind)) { - if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { - maybe_elms = AllocateUninitializedFixedDoubleArray(capacity); - } else { - ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); - maybe_elms = AllocateFixedDoubleArrayWithHoles(capacity); - } - } else { - ASSERT(IsFastSmiOrObjectElementsKind(elements_kind)); - if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { - maybe_elms = AllocateUninitializedFixedArray(capacity); - } else { - ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); - maybe_elms = AllocateFixedArrayWithHoles(capacity); - } - } - if (!maybe_elms->To(&elms)) return maybe_elms; - - array->set_elements(elms); - array->set_length(Smi::FromInt(length)); - return array; + return allocation; } -MaybeObject* Heap::AllocateJSArrayStorage( - JSArray* array, - int length, - int capacity, - ArrayStorageAllocationMode mode) { - ASSERT(capacity >= length); - - if (capacity == 0) { - array->set_length(Smi::FromInt(0)); - array->set_elements(empty_fixed_array()); - return array; - } - - FixedArrayBase* elms; - MaybeObject* maybe_elms = NULL; - ElementsKind elements_kind = array->GetElementsKind(); - if (IsFastDoubleElementsKind(elements_kind)) { - if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { - maybe_elms = AllocateUninitializedFixedDoubleArray(capacity); - } else { - ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); - maybe_elms = AllocateFixedDoubleArrayWithHoles(capacity); - } - } else { - ASSERT(IsFastSmiOrObjectElementsKind(elements_kind)); - if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { - maybe_elms = AllocateUninitializedFixedArray(capacity); - } else { - ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); - maybe_elms = AllocateFixedArrayWithHoles(capacity); - } - } - if (!maybe_elms->To(&elms)) return maybe_elms; - - array->set_elements(elms); - array->set_length(Smi::FromInt(length)); - return array; -} - - -MaybeObject* Heap::AllocateJSArrayWithElements( - FixedArrayBase* elements, - ElementsKind elements_kind, - int length, - PretenureFlag pretenure) { - MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure); - JSArray* array; - if 
(!maybe_array->To(&array)) return maybe_array; - - array->set_elements(elements); - array->set_length(Smi::FromInt(length)); - array->ValidateElements(); - return array; -} - - -MaybeObject* Heap::AllocateJSProxy(Object* handler, Object* prototype) { - // Allocate map. - // TODO(rossberg): Once we optimize proxies, think about a scheme to share - // maps. Will probably depend on the identity of the handler object, too. - Map* map; - MaybeObject* maybe_map_obj = AllocateMap(JS_PROXY_TYPE, JSProxy::kSize); - if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj; - map->set_prototype(prototype); - - // Allocate the proxy object. - JSProxy* result; - MaybeObject* maybe_result = Allocate(map, NEW_SPACE); - if (!maybe_result->To<JSProxy>(&result)) return maybe_result; - result->InitializeBody(map->instance_size(), Smi::FromInt(0)); - result->set_handler(handler); - result->set_hash(undefined_value(), SKIP_WRITE_BARRIER); - return result; -} - - -MaybeObject* Heap::AllocateJSFunctionProxy(Object* handler, - Object* call_trap, - Object* construct_trap, - Object* prototype) { - // Allocate map. - // TODO(rossberg): Once we optimize proxies, think about a scheme to share - // maps. Will probably depend on the identity of the handler object, too. - Map* map; - MaybeObject* maybe_map_obj = - AllocateMap(JS_FUNCTION_PROXY_TYPE, JSFunctionProxy::kSize); - if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj; - map->set_prototype(prototype); - - // Allocate the proxy object. - JSFunctionProxy* result; - MaybeObject* maybe_result = Allocate(map, NEW_SPACE); - if (!maybe_result->To<JSFunctionProxy>(&result)) return maybe_result; - result->InitializeBody(map->instance_size(), Smi::FromInt(0)); - result->set_handler(handler); - result->set_hash(undefined_value(), SKIP_WRITE_BARRIER); - result->set_call_trap(call_trap); - result->set_construct_trap(construct_trap); - return result; -} - - -MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { +AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) { // Never used to copy functions. If functions need to be copied we // have to be careful to clear the literals array. SLOW_ASSERT(!source->IsJSFunction()); @@ -4717,7 +3718,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { // Make the clone. Map* map = source->map(); int object_size = map->instance_size(); - Object* clone; + HeapObject* clone; ASSERT(site == NULL || AllocationSite::CanTrack(map->instance_type())); @@ -4726,11 +3727,11 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { // If we're forced to always allocate, we use the general allocation // functions which may leave us with an object in old space. if (always_allocate()) { - { MaybeObject* maybe_clone = + { AllocationResult allocation = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); - if (!maybe_clone->ToObject(&clone)) return maybe_clone; + if (!allocation.To(&clone)) return allocation; } - Address clone_address = HeapObject::cast(clone)->address(); + Address clone_address = clone->address(); CopyBlock(clone_address, source->address(), object_size); @@ -4744,14 +3745,14 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { { int adjusted_object_size = site != NULL ? 
object_size + AllocationMemento::kSize : object_size; - MaybeObject* maybe_clone = + AllocationResult allocation = AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE); - if (!maybe_clone->ToObject(&clone)) return maybe_clone; + if (!allocation.To(&clone)) return allocation; } SLOW_ASSERT(InNewSpace(clone)); // Since we know the clone is allocated in new space, we can copy // the contents without worrying about updating the write barrier. - CopyBlock(HeapObject::cast(clone)->address(), + CopyBlock(clone->address(), source->address(), object_size); @@ -4768,143 +3769,35 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { FixedArray* properties = FixedArray::cast(source->properties()); // Update elements if necessary. if (elements->length() > 0) { - Object* elem; - { MaybeObject* maybe_elem; + FixedArrayBase* elem; + { AllocationResult allocation; if (elements->map() == fixed_cow_array_map()) { - maybe_elem = FixedArray::cast(elements); + allocation = FixedArray::cast(elements); } else if (source->HasFastDoubleElements()) { - maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); + allocation = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); } else { - maybe_elem = CopyFixedArray(FixedArray::cast(elements)); + allocation = CopyFixedArray(FixedArray::cast(elements)); } - if (!maybe_elem->ToObject(&elem)) return maybe_elem; + if (!allocation.To(&elem)) return allocation; } - JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode); + JSObject::cast(clone)->set_elements(elem, wb_mode); } // Update properties if necessary. if (properties->length() > 0) { - Object* prop; - { MaybeObject* maybe_prop = CopyFixedArray(properties); - if (!maybe_prop->ToObject(&prop)) return maybe_prop; + FixedArray* prop; + { AllocationResult allocation = CopyFixedArray(properties); + if (!allocation.To(&prop)) return allocation; } - JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode); + JSObject::cast(clone)->set_properties(prop, wb_mode); } // Return the new clone. return clone; } -MaybeObject* Heap::ReinitializeJSReceiver( - JSReceiver* object, InstanceType type, int size) { - ASSERT(type >= FIRST_JS_OBJECT_TYPE); - - // Allocate fresh map. - // TODO(rossberg): Once we optimize proxies, cache these maps. - Map* map; - MaybeObject* maybe = AllocateMap(type, size); - if (!maybe->To<Map>(&map)) return maybe; - - // Check that the receiver has at least the size of the fresh object. - int size_difference = object->map()->instance_size() - map->instance_size(); - ASSERT(size_difference >= 0); - - map->set_prototype(object->map()->prototype()); - - // Allocate the backing storage for the properties. - int prop_size = map->unused_property_fields() - map->inobject_properties(); - Object* properties; - maybe = AllocateFixedArray(prop_size, TENURED); - if (!maybe->ToObject(&properties)) return maybe; - - // Functions require some allocation, which might fail here. - SharedFunctionInfo* shared = NULL; - if (type == JS_FUNCTION_TYPE) { - String* name; - OneByteStringKey key(STATIC_ASCII_VECTOR("<freezing call trap>"), - HashSeed()); - maybe = InternalizeStringWithKey(&key); - if (!maybe->To<String>(&name)) return maybe; - maybe = AllocateSharedFunctionInfo(name); - if (!maybe->To<SharedFunctionInfo>(&shared)) return maybe; - } - - // Because of possible retries of this function after failure, - // we must NOT fail after this point, where we have changed the type! - - // Reset the map for the object. 
- object->set_map(map); - JSObject* jsobj = JSObject::cast(object); - - // Reinitialize the object from the constructor map. - InitializeJSObjectFromMap(jsobj, FixedArray::cast(properties), map); - - // Functions require some minimal initialization. - if (type == JS_FUNCTION_TYPE) { - map->set_function_with_prototype(true); - InitializeFunction(JSFunction::cast(object), shared, the_hole_value()); - JSFunction::cast(object)->set_context( - isolate()->context()->native_context()); - } - - // Put in filler if the new object is smaller than the old. - if (size_difference > 0) { - CreateFillerObjectAt( - object->address() + map->instance_size(), size_difference); - } - - return object; -} - - -MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor, - JSGlobalProxy* object) { - ASSERT(constructor->has_initial_map()); - Map* map = constructor->initial_map(); - - // Check that the already allocated object has the same size and type as - // objects allocated using the constructor. - ASSERT(map->instance_size() == object->map()->instance_size()); - ASSERT(map->instance_type() == object->map()->instance_type()); - - // Allocate the backing storage for the properties. - int prop_size = map->unused_property_fields() - map->inobject_properties(); - Object* properties; - { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, TENURED); - if (!maybe_properties->ToObject(&properties)) return maybe_properties; - } - - // Reset the map for the object. - object->set_map(constructor->initial_map()); - - // Reinitialize the object from the constructor map. - InitializeJSObjectFromMap(object, FixedArray::cast(properties), map); - return object; -} - - -MaybeObject* Heap::AllocateStringFromOneByte(Vector<const uint8_t> string, - PretenureFlag pretenure) { - int length = string.length(); - if (length == 1) { - return Heap::LookupSingleCharacterStringFromCode(string[0]); - } - Object* result; - { MaybeObject* maybe_result = - AllocateRawOneByteString(string.length(), pretenure); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - - // Copy the characters into the new object. - CopyChars(SeqOneByteString::cast(result)->GetChars(), - string.start(), - length); - return result; -} - - -MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string, - int non_ascii_start, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateStringFromUtf8Slow(Vector<const char> string, + int non_ascii_start, + PretenureFlag pretenure) { // Continue counting the number of characters in the UTF-8 string, starting // from the first non-ascii character or word. Access<UnicodeCache::Utf8Decoder> @@ -4914,16 +3807,16 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string, int utf16_length = decoder->Utf16Length(); ASSERT(utf16_length > 0); // Allocate string. - Object* result; + HeapObject* result; { int chars = non_ascii_start + utf16_length; - MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure); - if (!maybe_result->ToObject(&result)) return maybe_result; + AllocationResult allocation = AllocateRawTwoByteString(chars, pretenure); + if (!allocation.To(&result) || result->IsException()) { + return allocation; + } } - // Convert and copy the characters into the new object. - SeqTwoByteString* twobyte = SeqTwoByteString::cast(result); // Copy ascii portion. 
- uint16_t* data = twobyte->GetChars(); + uint16_t* data = SeqTwoByteString::cast(result)->GetChars(); if (non_ascii_start != 0) { const char* ascii_data = string.start(); for (int i = 0; i < non_ascii_start; i++) { @@ -4936,52 +3829,30 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string, } -MaybeObject* Heap::AllocateStringFromTwoByte(Vector<const uc16> string, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateStringFromTwoByte(Vector<const uc16> string, + PretenureFlag pretenure) { // Check if the string is an ASCII string. - Object* result; + HeapObject* result; int length = string.length(); const uc16* start = string.start(); if (String::IsOneByte(start, length)) { - MaybeObject* maybe_result = AllocateRawOneByteString(length, pretenure); - if (!maybe_result->ToObject(&result)) return maybe_result; + AllocationResult allocation = AllocateRawOneByteString(length, pretenure); + if (!allocation.To(&result) || result->IsException()) { + return allocation; + } CopyChars(SeqOneByteString::cast(result)->GetChars(), start, length); } else { // It's not a one byte string. - MaybeObject* maybe_result = AllocateRawTwoByteString(length, pretenure); - if (!maybe_result->ToObject(&result)) return maybe_result; + AllocationResult allocation = AllocateRawTwoByteString(length, pretenure); + if (!allocation.To(&result) || result->IsException()) { + return allocation; + } CopyChars(SeqTwoByteString::cast(result)->GetChars(), start, length); } return result; } -Map* Heap::InternalizedStringMapForString(String* string) { - // If the string is in new space it cannot be used as internalized. - if (InNewSpace(string)) return NULL; - - // Find the corresponding internalized string map for strings. - switch (string->map()->instance_type()) { - case STRING_TYPE: return internalized_string_map(); - case ASCII_STRING_TYPE: return ascii_internalized_string_map(); - case CONS_STRING_TYPE: return cons_internalized_string_map(); - case CONS_ASCII_STRING_TYPE: return cons_ascii_internalized_string_map(); - case EXTERNAL_STRING_TYPE: return external_internalized_string_map(); - case EXTERNAL_ASCII_STRING_TYPE: - return external_ascii_internalized_string_map(); - case EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE: - return external_internalized_string_with_one_byte_data_map(); - case SHORT_EXTERNAL_STRING_TYPE: - return short_external_internalized_string_map(); - case SHORT_EXTERNAL_ASCII_STRING_TYPE: - return short_external_ascii_internalized_string_map(); - case SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE: - return short_external_internalized_string_with_one_byte_data_map(); - default: return NULL; // No match found. - } -} - - static inline void WriteOneByteData(Vector<const char> vector, uint8_t* chars, int len) { @@ -5031,7 +3902,7 @@ static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) { template<bool is_one_byte, typename T> -MaybeObject* Heap::AllocateInternalizedStringImpl( +AllocationResult Heap::AllocateInternalizedStringImpl( T t, int chars, uint32_t hash_field) { ASSERT(chars >= 0); // Compute map and object size. @@ -5051,12 +3922,12 @@ MaybeObject* Heap::AllocateInternalizedStringImpl( AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); // Allocate string. 
- Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } - reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map); + result->set_map_no_write_barrier(map); // Set length and hash fields of the allocated string. String* answer = String::cast(result); answer->set_length(chars); @@ -5075,17 +3946,18 @@ MaybeObject* Heap::AllocateInternalizedStringImpl( // Need explicit instantiations. template -MaybeObject* Heap::AllocateInternalizedStringImpl<true>(String*, int, uint32_t); +AllocationResult Heap::AllocateInternalizedStringImpl<true>( + String*, int, uint32_t); template -MaybeObject* Heap::AllocateInternalizedStringImpl<false>( +AllocationResult Heap::AllocateInternalizedStringImpl<false>( String*, int, uint32_t); template -MaybeObject* Heap::AllocateInternalizedStringImpl<false>( +AllocationResult Heap::AllocateInternalizedStringImpl<false>( Vector<const char>, int, uint32_t); -MaybeObject* Heap::AllocateRawOneByteString(int length, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateRawOneByteString(int length, + PretenureFlag pretenure) { if (length < 0 || length > String::kMaxLength) { return isolate()->ThrowInvalidStringLength(); } @@ -5093,13 +3965,13 @@ MaybeObject* Heap::AllocateRawOneByteString(int length, ASSERT(size <= SeqOneByteString::kMaxSize); AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } // Partially initialize the object. - HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); + result->set_map_no_write_barrier(ascii_string_map()); String::cast(result)->set_length(length); String::cast(result)->set_hash_field(String::kEmptyHashField); ASSERT_EQ(size, HeapObject::cast(result)->Size()); @@ -5108,8 +3980,8 @@ MaybeObject* Heap::AllocateRawOneByteString(int length, } -MaybeObject* Heap::AllocateRawTwoByteString(int length, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateRawTwoByteString(int length, + PretenureFlag pretenure) { if (length < 0 || length > String::kMaxLength) { return isolate()->ThrowInvalidStringLength(); } @@ -5117,13 +3989,13 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length, ASSERT(size <= SeqTwoByteString::kMaxSize); AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } // Partially initialize the object. 
- HeapObject::cast(result)->set_map_no_write_barrier(string_map()); + result->set_map_no_write_barrier(string_map()); String::cast(result)->set_length(length); String::cast(result)->set_hash_field(String::kEmptyHashField); ASSERT_EQ(size, HeapObject::cast(result)->Size()); @@ -5131,49 +4003,37 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length, } -MaybeObject* Heap::AllocateJSArray( - ElementsKind elements_kind, - PretenureFlag pretenure) { - Context* native_context = isolate()->context()->native_context(); - JSFunction* array_function = native_context->array_function(); - Map* map = array_function->initial_map(); - Map* transition_map = isolate()->get_initial_js_array_map(elements_kind); - if (transition_map != NULL) map = transition_map; - return AllocateJSObjectFromMap(map, pretenure); -} - - -MaybeObject* Heap::AllocateEmptyFixedArray() { +AllocationResult Heap::AllocateEmptyFixedArray() { int size = FixedArray::SizeFor(0); - Object* result; - { MaybeObject* maybe_result = + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + if (!allocation.To(&result)) return allocation; } // Initialize the object. - reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier( - fixed_array_map()); - reinterpret_cast<FixedArray*>(result)->set_length(0); + result->set_map_no_write_barrier(fixed_array_map()); + FixedArray::cast(result)->set_length(0); return result; } -MaybeObject* Heap::AllocateEmptyExternalArray(ExternalArrayType array_type) { +AllocationResult Heap::AllocateEmptyExternalArray( + ExternalArrayType array_type) { return AllocateExternalArray(0, array_type, NULL, TENURED); } -MaybeObject* Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { +AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { if (!InNewSpace(src)) { return src; } int len = src->length(); - Object* obj; - { MaybeObject* maybe_obj = AllocateRawFixedArray(len, TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + HeapObject* obj; + { AllocationResult allocation = AllocateRawFixedArray(len, TENURED); + if (!allocation.To(&obj)) return allocation; } - HeapObject::cast(obj)->set_map_no_write_barrier(fixed_array_map()); + obj->set_map_no_write_barrier(fixed_array_map()); FixedArray* result = FixedArray::cast(obj); result->set_length(len); @@ -5190,26 +4050,26 @@ MaybeObject* Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { } -MaybeObject* Heap::AllocateEmptyFixedTypedArray(ExternalArrayType array_type) { +AllocationResult Heap::AllocateEmptyFixedTypedArray( + ExternalArrayType array_type) { return AllocateFixedTypedArray(0, array_type, TENURED); } -MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { +AllocationResult Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { int len = src->length(); - Object* obj; - { MaybeObject* maybe_obj = AllocateRawFixedArray(len, NOT_TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + HeapObject* obj; + { AllocationResult allocation = AllocateRawFixedArray(len, NOT_TENURED); + if (!allocation.To(&obj)) return allocation; } if (InNewSpace(obj)) { - HeapObject* dst = HeapObject::cast(obj); - dst->set_map_no_write_barrier(map); - CopyBlock(dst->address() + kPointerSize, + obj->set_map_no_write_barrier(map); + CopyBlock(obj->address() + kPointerSize, src->address() + kPointerSize, FixedArray::SizeFor(len) - kPointerSize); return obj; } - HeapObject::cast(obj)->set_map_no_write_barrier(map); + 
obj->set_map_no_write_barrier(map); FixedArray* result = FixedArray::cast(obj); result->set_length(len); @@ -5221,48 +4081,47 @@ MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { } -MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, - Map* map) { +AllocationResult Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, + Map* map) { int len = src->length(); - Object* obj; - { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(len, NOT_TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + HeapObject* obj; + { AllocationResult allocation = AllocateRawFixedDoubleArray(len, NOT_TENURED); + if (!allocation.To(&obj)) return allocation; } - HeapObject* dst = HeapObject::cast(obj); - dst->set_map_no_write_barrier(map); + obj->set_map_no_write_barrier(map); CopyBlock( - dst->address() + FixedDoubleArray::kLengthOffset, + obj->address() + FixedDoubleArray::kLengthOffset, src->address() + FixedDoubleArray::kLengthOffset, FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); return obj; } -MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, - Map* map) { +AllocationResult Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, + Map* map) { int int64_entries = src->count_of_int64_entries(); int code_ptr_entries = src->count_of_code_ptr_entries(); int heap_ptr_entries = src->count_of_heap_ptr_entries(); int int32_entries = src->count_of_int32_entries(); - Object* obj; - { MaybeObject* maybe_obj = + HeapObject* obj; + { AllocationResult allocation = AllocateConstantPoolArray(int64_entries, code_ptr_entries, heap_ptr_entries, int32_entries); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + if (!allocation.To(&obj)) return allocation; } - HeapObject* dst = HeapObject::cast(obj); - dst->set_map_no_write_barrier(map); + obj->set_map_no_write_barrier(map); int size = ConstantPoolArray::SizeFor( int64_entries, code_ptr_entries, heap_ptr_entries, int32_entries); CopyBlock( - dst->address() + ConstantPoolArray::kLengthOffset, + obj->address() + ConstantPoolArray::kLengthOffset, src->address() + ConstantPoolArray::kLengthOffset, size - ConstantPoolArray::kLengthOffset); return obj; } -MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { +AllocationResult Heap::AllocateRawFixedArray(int length, + PretenureFlag pretenure) { if (length < 0 || length > FixedArray::kMaxLength) { v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); } @@ -5273,20 +4132,20 @@ MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { } -MaybeObject* Heap::AllocateFixedArrayWithFiller(int length, - PretenureFlag pretenure, - Object* filler) { +AllocationResult Heap::AllocateFixedArrayWithFiller(int length, + PretenureFlag pretenure, + Object* filler) { ASSERT(length >= 0); ASSERT(empty_fixed_array()->IsFixedArray()); if (length == 0) return empty_fixed_array(); ASSERT(!InNewSpace(filler)); - Object* result; - { MaybeObject* maybe_result = AllocateRawFixedArray(length, pretenure); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRawFixedArray(length, pretenure); + if (!allocation.To(&result)) return allocation; } - HeapObject::cast(result)->set_map_no_write_barrier(fixed_array_map()); + result->set_map_no_write_barrier(fixed_array_map()); FixedArray* array = FixedArray::cast(result); array->set_length(length); MemsetPointer(array->data_start(), filler, length); @@ -5294,87 +4153,42 @@ MaybeObject* 
Heap::AllocateFixedArrayWithFiller(int length, } -MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { +AllocationResult Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { return AllocateFixedArrayWithFiller(length, pretenure, undefined_value()); } -MaybeObject* Heap::AllocateFixedArrayWithHoles(int length, - PretenureFlag pretenure) { - return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value()); -} - - -MaybeObject* Heap::AllocateUninitializedFixedArray(int length) { +AllocationResult Heap::AllocateUninitializedFixedArray(int length) { if (length == 0) return empty_fixed_array(); - Object* obj; - { MaybeObject* maybe_obj = AllocateRawFixedArray(length, NOT_TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + HeapObject* obj; + { AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED); + if (!allocation.To(&obj)) return allocation; } - reinterpret_cast<FixedArray*>(obj)->set_map_no_write_barrier( - fixed_array_map()); + obj->set_map_no_write_barrier(fixed_array_map()); FixedArray::cast(obj)->set_length(length); return obj; } -MaybeObject* Heap::AllocateEmptyFixedDoubleArray() { - int size = FixedDoubleArray::SizeFor(0); - Object* result; - { MaybeObject* maybe_result = - AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - // Initialize the object. - reinterpret_cast<FixedDoubleArray*>(result)->set_map_no_write_barrier( - fixed_double_array_map()); - reinterpret_cast<FixedDoubleArray*>(result)->set_length(0); - return result; -} - - -MaybeObject* Heap::AllocateUninitializedFixedDoubleArray( +AllocationResult Heap::AllocateUninitializedFixedDoubleArray( int length, PretenureFlag pretenure) { if (length == 0) return empty_fixed_array(); - Object* elements_object; - MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure); - if (!maybe_obj->ToObject(&elements_object)) return maybe_obj; - FixedDoubleArray* elements = - reinterpret_cast<FixedDoubleArray*>(elements_object); + HeapObject* elements; + AllocationResult allocation = AllocateRawFixedDoubleArray(length, pretenure); + if (!allocation.To(&elements)) return allocation; elements->set_map_no_write_barrier(fixed_double_array_map()); - elements->set_length(length); + FixedDoubleArray::cast(elements)->set_length(length); return elements; } -MaybeObject* Heap::AllocateFixedDoubleArrayWithHoles( - int length, - PretenureFlag pretenure) { - if (length == 0) return empty_fixed_array(); - - Object* elements_object; - MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure); - if (!maybe_obj->ToObject(&elements_object)) return maybe_obj; - FixedDoubleArray* elements = - reinterpret_cast<FixedDoubleArray*>(elements_object); - - for (int i = 0; i < length; ++i) { - elements->set_the_hole(i); - } - - elements->set_map_no_write_barrier(fixed_double_array_map()); - elements->set_length(length); - return elements; -} - - -MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateRawFixedDoubleArray(int length, + PretenureFlag pretenure) { if (length < 0 || length > FixedDoubleArray::kMaxLength) { v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); } @@ -5385,20 +4199,26 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); HeapObject* object; - { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); - if 
(!maybe_object->To<HeapObject>(&object)) return maybe_object; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&object)) return allocation; } return EnsureDoubleAligned(this, object, size); } -MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries, - int number_of_code_ptr_entries, - int number_of_heap_ptr_entries, - int number_of_int32_entries) { - ASSERT(number_of_int64_entries > 0 || number_of_code_ptr_entries > 0 || - number_of_heap_ptr_entries > 0 || number_of_int32_entries > 0); +AllocationResult Heap::AllocateConstantPoolArray(int number_of_int64_entries, + int number_of_code_ptr_entries, + int number_of_heap_ptr_entries, + int number_of_int32_entries) { + CHECK(number_of_int64_entries >= 0 && + number_of_int64_entries <= ConstantPoolArray::kMaxEntriesPerType && + number_of_code_ptr_entries >= 0 && + number_of_code_ptr_entries <= ConstantPoolArray::kMaxEntriesPerType && + number_of_heap_ptr_entries >= 0 && + number_of_heap_ptr_entries <= ConstantPoolArray::kMaxEntriesPerType && + number_of_int32_entries >= 0 && + number_of_int32_entries <= ConstantPoolArray::kMaxEntriesPerType); int size = ConstantPoolArray::SizeFor(number_of_int64_entries, number_of_code_ptr_entries, number_of_heap_ptr_entries, @@ -5409,18 +4229,17 @@ MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries, AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); HeapObject* object; - { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_POINTER_SPACE); - if (!maybe_object->To<HeapObject>(&object)) return maybe_object; + { AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE); + if (!allocation.To(&object)) return allocation; } object = EnsureDoubleAligned(this, object, size); - HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map()); - - ConstantPoolArray* constant_pool = - reinterpret_cast<ConstantPoolArray*>(object); - constant_pool->SetEntryCounts(number_of_int64_entries, - number_of_code_ptr_entries, - number_of_heap_ptr_entries, - number_of_int32_entries); + object->set_map_no_write_barrier(constant_pool_array_map()); + + ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); + constant_pool->Init(number_of_int64_entries, + number_of_code_ptr_entries, + number_of_heap_ptr_entries, + number_of_int32_entries); if (number_of_code_ptr_entries > 0) { int offset = constant_pool->OffsetOfElementAt(constant_pool->first_code_ptr_index()); @@ -5441,41 +4260,29 @@ MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries, } -MaybeObject* Heap::AllocateEmptyConstantPoolArray() { +AllocationResult Heap::AllocateEmptyConstantPoolArray() { int size = ConstantPoolArray::SizeFor(0, 0, 0, 0); - Object* result; - { MaybeObject* maybe_result = + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - HeapObject::cast(result)->set_map_no_write_barrier(constant_pool_array_map()); - ConstantPoolArray::cast(result)->SetEntryCounts(0, 0, 0, 0); - return result; -} - - -MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { - Object* result; - { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); - if (!maybe_result->ToObject(&result)) return maybe_result; + if (!allocation.To(&result)) return allocation; } - reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( - hash_table_map()); - 
ASSERT(result->IsHashTable()); + result->set_map_no_write_barrier(constant_pool_array_map()); + ConstantPoolArray::cast(result)->Init(0, 0, 0, 0); return result; } -MaybeObject* Heap::AllocateSymbol() { +AllocationResult Heap::AllocateSymbol() { // Statically ensure that it is safe to allocate symbols in paged spaces. STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize); - Object* result; - MaybeObject* maybe = + HeapObject* result; + AllocationResult allocation = AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE); - if (!maybe->ToObject(&result)) return maybe; + if (!allocation.To(&result)) return allocation; - HeapObject::cast(result)->set_map_no_write_barrier(symbol_map()); + result->set_map_no_write_barrier(symbol_map()); // Generate a random hash value. int hash; @@ -5496,159 +4303,7 @@ MaybeObject* Heap::AllocateSymbol() { } -MaybeObject* Heap::AllocatePrivateSymbol() { - MaybeObject* maybe = AllocateSymbol(); - Symbol* symbol; - if (!maybe->To(&symbol)) return maybe; - symbol->set_is_private(true); - return symbol; -} - - -MaybeObject* Heap::AllocateNativeContext() { - Object* result; - { MaybeObject* maybe_result = - AllocateFixedArray(Context::NATIVE_CONTEXT_SLOTS); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - Context* context = reinterpret_cast<Context*>(result); - context->set_map_no_write_barrier(native_context_map()); - context->set_js_array_maps(undefined_value()); - ASSERT(context->IsNativeContext()); - ASSERT(result->IsContext()); - return result; -} - - -MaybeObject* Heap::AllocateGlobalContext(JSFunction* function, - ScopeInfo* scope_info) { - Object* result; - { MaybeObject* maybe_result = - AllocateFixedArray(scope_info->ContextLength(), TENURED); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - Context* context = reinterpret_cast<Context*>(result); - context->set_map_no_write_barrier(global_context_map()); - context->set_closure(function); - context->set_previous(function->context()); - context->set_extension(scope_info); - context->set_global_object(function->context()->global_object()); - ASSERT(context->IsGlobalContext()); - ASSERT(result->IsContext()); - return context; -} - - -MaybeObject* Heap::AllocateModuleContext(ScopeInfo* scope_info) { - Object* result; - { MaybeObject* maybe_result = - AllocateFixedArray(scope_info->ContextLength(), TENURED); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - Context* context = reinterpret_cast<Context*>(result); - context->set_map_no_write_barrier(module_context_map()); - // Instance link will be set later. 
- context->set_extension(Smi::FromInt(0)); - return context; -} - - -MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) { - ASSERT(length >= Context::MIN_CONTEXT_SLOTS); - Object* result; - { MaybeObject* maybe_result = AllocateFixedArray(length); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - Context* context = reinterpret_cast<Context*>(result); - context->set_map_no_write_barrier(function_context_map()); - context->set_closure(function); - context->set_previous(function->context()); - context->set_extension(Smi::FromInt(0)); - context->set_global_object(function->context()->global_object()); - return context; -} - - -MaybeObject* Heap::AllocateCatchContext(JSFunction* function, - Context* previous, - String* name, - Object* thrown_object) { - STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX); - Object* result; - { MaybeObject* maybe_result = - AllocateFixedArray(Context::MIN_CONTEXT_SLOTS + 1); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - Context* context = reinterpret_cast<Context*>(result); - context->set_map_no_write_barrier(catch_context_map()); - context->set_closure(function); - context->set_previous(previous); - context->set_extension(name); - context->set_global_object(previous->global_object()); - context->set(Context::THROWN_OBJECT_INDEX, thrown_object); - return context; -} - - -MaybeObject* Heap::AllocateWithContext(JSFunction* function, - Context* previous, - JSReceiver* extension) { - Object* result; - { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - Context* context = reinterpret_cast<Context*>(result); - context->set_map_no_write_barrier(with_context_map()); - context->set_closure(function); - context->set_previous(previous); - context->set_extension(extension); - context->set_global_object(previous->global_object()); - return context; -} - - -MaybeObject* Heap::AllocateBlockContext(JSFunction* function, - Context* previous, - ScopeInfo* scope_info) { - Object* result; - { MaybeObject* maybe_result = - AllocateFixedArrayWithHoles(scope_info->ContextLength()); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - Context* context = reinterpret_cast<Context*>(result); - context->set_map_no_write_barrier(block_context_map()); - context->set_closure(function); - context->set_previous(previous); - context->set_extension(scope_info); - context->set_global_object(previous->global_object()); - return context; -} - - -MaybeObject* Heap::AllocateScopeInfo(int length) { - FixedArray* scope_info; - MaybeObject* maybe_scope_info = AllocateFixedArray(length, TENURED); - if (!maybe_scope_info->To(&scope_info)) return maybe_scope_info; - scope_info->set_map_no_write_barrier(scope_info_map()); - return scope_info; -} - - -MaybeObject* Heap::AllocateExternal(void* value) { - Foreign* foreign; - { MaybeObject* maybe_result = AllocateForeign(static_cast<Address>(value)); - if (!maybe_result->To(&foreign)) return maybe_result; - } - JSObject* external; - { MaybeObject* maybe_result = AllocateJSObjectFromMap(external_map()); - if (!maybe_result->To(&external)) return maybe_result; - } - external->SetInternalField(0, foreign); - return external; -} - - -MaybeObject* Heap::AllocateStruct(InstanceType type) { +AllocationResult Heap::AllocateStruct(InstanceType type) { Map* map; switch (type) { #define MAKE_CASE(NAME, Name, name) \ @@ -5657,15 +4312,15 @@ STRUCT_LIST(MAKE_CASE) #undef MAKE_CASE 
default: UNREACHABLE(); - return Failure::InternalError(); + return exception(); } int size = map->instance_size(); AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); - Object* result; - { MaybeObject* maybe_result = Allocate(map, space); - if (!maybe_result->ToObject(&result)) return maybe_result; + Struct* result; + { AllocationResult allocation = Allocate(map, space); + if (!allocation.To(&result)) return allocation; } - Struct::cast(result)->InitializeBody(size); + result->InitializeBody(size); return result; } @@ -5740,7 +4395,7 @@ bool Heap::IdleNotification(int hint) { return false; } - if (!FLAG_incremental_marking || Serializer::enabled()) { + if (!FLAG_incremental_marking || Serializer::enabled(isolate_)) { return IdleGlobalGC(); } @@ -5750,17 +4405,8 @@ bool Heap::IdleNotification(int hint) { // An incremental GC progresses as follows: // 1. many incremental marking steps, // 2. one old space mark-sweep-compact, - // 3. many lazy sweep steps. // Use mark-sweep-compact events to count incremental GCs in a round. - if (incremental_marking()->IsStopped()) { - if (!mark_compact_collector()->AreSweeperThreadsActivated() && - !IsSweepingComplete() && - !AdvanceSweepers(static_cast<int>(step_size))) { - return false; - } - } - if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) { if (EnoughGarbageSinceLastIdleRound()) { StartIdleRound(); @@ -5796,6 +4442,13 @@ bool Heap::IdleNotification(int hint) { return true; } + // If the IdleNotifcation is called with a large hint we will wait for + // the sweepter threads here. + if (hint >= kMinHintForFullGC && + mark_compact_collector()->IsConcurrentSweepingInProgress()) { + mark_compact_collector()->WaitUntilSweepingCompleted(); + } + return false; } @@ -5966,8 +4619,10 @@ bool Heap::InSpace(Address addr, AllocationSpace space) { return property_cell_space_->Contains(addr); case LO_SPACE: return lo_space_->SlowContains(addr); + case INVALID_SPACE: + break; } - + UNREACHABLE(); return false; } @@ -5975,6 +4630,7 @@ bool Heap::InSpace(Address addr, AllocationSpace space) { #ifdef VERIFY_HEAP void Heap::Verify() { CHECK(HasBeenSetUp()); + HandleScope scope(isolate()); store_buffer()->Verify(); @@ -6000,52 +4656,6 @@ void Heap::Verify() { #endif -MaybeObject* Heap::InternalizeUtf8String(Vector<const char> string) { - Utf8StringKey key(string, HashSeed()); - return InternalizeStringWithKey(&key); -} - - -MaybeObject* Heap::InternalizeString(String* string) { - if (string->IsInternalizedString()) return string; - Object* result = NULL; - Object* new_table; - { MaybeObject* maybe_new_table = - string_table()->LookupString(string, &result); - if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table; - } - // Can't use set_string_table because StringTable::cast knows that - // StringTable is a singleton and checks for identity. 
- roots_[kStringTableRootIndex] = new_table; - ASSERT(result != NULL); - return result; -} - - -bool Heap::InternalizeStringIfExists(String* string, String** result) { - if (string->IsInternalizedString()) { - *result = string; - return true; - } - return string_table()->LookupStringIfExists(string, result); -} - - -MaybeObject* Heap::InternalizeStringWithKey(HashTableKey* key) { - Object* result = NULL; - Object* new_table; - { MaybeObject* maybe_new_table = - string_table()->LookupKey(key, &result); - if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table; - } - // Can't use set_string_table because StringTable::cast knows that - // StringTable is a singleton and checks for identity. - roots_[kStringTableRootIndex] = new_table; - ASSERT(result != NULL); - return result; -} - - void Heap::ZapFromSpace() { NewSpacePageIterator it(new_space_.FromSpaceStart(), new_space_.FromSpaceEnd()); @@ -6303,12 +4913,9 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { Relocatable::Iterate(isolate_, v); v->Synchronize(VisitorSynchronization::kRelocatable); -#ifdef ENABLE_DEBUGGER_SUPPORT - isolate_->debug()->Iterate(v); if (isolate_->deoptimizer_data() != NULL) { isolate_->deoptimizer_data()->Iterate(v); } -#endif v->Synchronize(VisitorSynchronization::kDebug); isolate_->compilation_cache()->Iterate(v); v->Synchronize(VisitorSynchronization::kCompilationCache); @@ -6373,10 +4980,22 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { // and through the API, we should gracefully handle the case that the heap // size is not big enough to fit all the initial objects. bool Heap::ConfigureHeap(int max_semispace_size, - intptr_t max_old_gen_size, - intptr_t max_executable_size) { + intptr_t max_old_space_size, + intptr_t max_executable_size, + intptr_t code_range_size) { if (HasBeenSetUp()) return false; + // If max space size flags are specified overwrite the configuration. + if (FLAG_max_new_space_size > 0) { + max_semispace_size = (FLAG_max_new_space_size / 2) * kLumpOfMemory; + } + if (FLAG_max_old_space_size > 0) { + max_old_space_size = FLAG_max_old_space_size * kLumpOfMemory; + } + if (FLAG_max_executable_size > 0) { + max_executable_size = FLAG_max_executable_size * kLumpOfMemory; + } + if (FLAG_stress_compaction) { // This will cause more frequent GCs when stressing. max_semispace_size_ = Page::kPageSize; @@ -6412,7 +5031,7 @@ bool Heap::ConfigureHeap(int max_semispace_size, reserved_semispace_size_ = max_semispace_size_; } - if (max_old_gen_size > 0) max_old_generation_size_ = max_old_gen_size; + if (max_old_space_size > 0) max_old_generation_size_ = max_old_space_size; if (max_executable_size > 0) { max_executable_size_ = RoundUp(max_executable_size, Page::kPageSize); } @@ -6448,6 +5067,14 @@ bool Heap::ConfigureHeap(int max_semispace_size, FixedArray::SizeFor(JSObject::kInitialMaxFastElementArray) + AllocationMemento::kSize)); + code_range_size_ = code_range_size; + + // We set the old generation growing factor to 2 to grow the heap slower on + // memory-constrained devices. 
+ if (max_old_generation_size_ <= kMaxOldSpaceSizeMediumMemoryDevice) { + old_space_growing_factor_ = 2; + } + configured_ = true; return true; } @@ -6456,7 +5083,8 @@ bool Heap::ConfigureHeap(int max_semispace_size, bool Heap::ConfigureHeapDefault() { return ConfigureHeap(static_cast<intptr_t>(FLAG_max_new_space_size / 2) * KB, static_cast<intptr_t>(FLAG_max_old_space_size) * MB, - static_cast<intptr_t>(FLAG_max_executable_size) * MB); + static_cast<intptr_t>(FLAG_max_executable_size) * MB, + static_cast<intptr_t>(0)); } @@ -6510,14 +5138,6 @@ intptr_t Heap::PromotedSpaceSizeOfObjects() { } -bool Heap::AdvanceSweepers(int step_size) { - ASSERT(!mark_compact_collector()->AreSweeperThreadsActivated()); - bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size); - sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size); - return sweeping_complete; -} - - int64_t Heap::PromotedExternalMemorySize() { if (amount_of_external_allocated_memory_ <= amount_of_external_allocated_memory_at_last_global_gc_) return 0; @@ -6609,16 +5229,10 @@ bool Heap::SetUp() { if (old_data_space_ == NULL) return false; if (!old_data_space_->SetUp()) return false; + if (!isolate_->code_range()->SetUp(code_range_size_)) return false; + // Initialize the code space, set its maximum capacity to the old // generation size. It needs executable memory. - // On 64-bit platform(s), we put all code objects in a 2 GB range of - // virtual address space, so that they can call each other with near calls. - if (code_range_size_ > 0) { - if (!isolate_->code_range()->SetUp(code_range_size_)) { - return false; - } - } - code_space_ = new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE); if (code_space_ == NULL) return false; @@ -6672,10 +5286,11 @@ bool Heap::SetUp() { bool Heap::CreateHeapObjects() { // Create initial maps. if (!CreateInitialMaps()) return false; - if (!CreateApiObjects()) return false; + CreateApiObjects(); // Create initial objects - if (!CreateInitialObjects()) return false; + CreateInitialObjects(); + CHECK_EQ(0, gc_count_); native_contexts_list_ = undefined_value(); array_buffers_list_ = undefined_value(); @@ -6853,24 +5468,25 @@ void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback) { } -MaybeObject* Heap::AddWeakObjectToCodeDependency(Object* obj, - DependentCode* dep) { - ASSERT(!InNewSpace(obj)); - ASSERT(!InNewSpace(dep)); - MaybeObject* maybe_obj = - WeakHashTable::cast(weak_object_to_code_table_)->Put(obj, dep); - WeakHashTable* table; - if (!maybe_obj->To(&table)) return maybe_obj; - if (ShouldZapGarbage() && weak_object_to_code_table_ != table) { +// TODO(ishell): Find a better place for this. 
+void Heap::AddWeakObjectToCodeDependency(Handle<Object> obj, + Handle<DependentCode> dep) { + ASSERT(!InNewSpace(*obj)); + ASSERT(!InNewSpace(*dep)); + HandleScope scope(isolate()); + Handle<WeakHashTable> table(WeakHashTable::cast(weak_object_to_code_table_), + isolate()); + table = WeakHashTable::Put(table, obj, dep); + + if (ShouldZapGarbage() && weak_object_to_code_table_ != *table) { WeakHashTable::cast(weak_object_to_code_table_)->Zap(the_hole_value()); } - set_weak_object_to_code_table(table); - ASSERT_EQ(dep, WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj)); - return weak_object_to_code_table_; + set_weak_object_to_code_table(*table); + ASSERT_EQ(*dep, table->Lookup(obj)); } -DependentCode* Heap::LookupWeakObjectToCodeDependency(Object* obj) { +DependentCode* Heap::LookupWeakObjectToCodeDependency(Handle<Object> obj) { Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj); if (dep->IsDependentCode()) return DependentCode::cast(dep); return DependentCode::cast(empty_fixed_array()); @@ -6879,7 +5495,8 @@ DependentCode* Heap::LookupWeakObjectToCodeDependency(Object* obj) { void Heap::EnsureWeakObjectToCodeTable() { if (!weak_object_to_code_table()->IsHashTable()) { - set_weak_object_to_code_table(*isolate()->factory()->NewWeakHashTable(16)); + set_weak_object_to_code_table(*WeakHashTable::New( + isolate(), 16, USE_DEFAULT_MINIMUM_CAPACITY, TENURED)); } } @@ -7603,19 +6220,21 @@ const char* GCTracer::CollectorString() { } -int KeyedLookupCache::Hash(Map* map, Name* name) { +int KeyedLookupCache::Hash(Handle<Map> map, Handle<Name> name) { + DisallowHeapAllocation no_gc; // Uses only lower 32 bits if pointers are larger. uintptr_t addr_hash = - static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift; + static_cast<uint32_t>(reinterpret_cast<uintptr_t>(*map)) >> kMapHashShift; return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask); } -int KeyedLookupCache::Lookup(Map* map, Name* name) { +int KeyedLookupCache::Lookup(Handle<Map> map, Handle<Name> name) { + DisallowHeapAllocation no_gc; int index = (Hash(map, name) & kHashMask); for (int i = 0; i < kEntriesPerBucket; i++) { Key& key = keys_[index + i]; - if ((key.map == map) && key.name->Equals(name)) { + if ((key.map == *map) && key.name->Equals(*name)) { return field_offsets_[index + i]; } } @@ -7623,18 +6242,20 @@ int KeyedLookupCache::Lookup(Map* map, Name* name) { } -void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) { +void KeyedLookupCache::Update(Handle<Map> map, + Handle<Name> name, + int field_offset) { + DisallowHeapAllocation no_gc; if (!name->IsUniqueName()) { - String* internalized_string; - if (!map->GetIsolate()->heap()->InternalizeStringIfExists( - String::cast(name), &internalized_string)) { + if (!StringTable::InternalizeStringIfExists(name->GetIsolate(), + Handle<String>::cast(name)). + ToHandle(&name)) { return; } - name = internalized_string; } // This cache is cleared only between mark compact passes, so we expect the // cache to only contain old space names. 
- ASSERT(!map->GetIsolate()->heap()->InNewSpace(name)); + ASSERT(!map->GetIsolate()->heap()->InNewSpace(*name)); int index = (Hash(map, name) & kHashMask); // After a GC there will be free slots, so we use them in order (this may @@ -7643,8 +6264,8 @@ void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) { Key& key = keys_[index]; Object* free_entry_indicator = NULL; if (key.map == free_entry_indicator) { - key.map = map; - key.name = name; + key.map = *map; + key.name = *name; field_offsets_[index + i] = field_offset; return; } @@ -7660,8 +6281,8 @@ void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) { // Write the new first entry. Key& key = keys_[index]; - key.map = map; - key.name = name; + key.map = *map; + key.name = *name; field_offsets_[index] = field_offset; } @@ -7676,16 +6297,6 @@ void DescriptorLookupCache::Clear() { } -#ifdef DEBUG -void Heap::GarbageCollectionGreedyCheck() { - ASSERT(FLAG_gc_greedy); - if (isolate_->bootstrapper()->IsActive()) return; - if (!AllowAllocationFailure::IsAllowed(isolate_)) return; - CollectGarbage(NEW_SPACE); -} -#endif - - void ExternalStringTable::CleanUp() { int last = 0; for (int i = 0; i < new_space_strings_.length(); ++i) { diff --git a/deps/v8/src/heap.h b/deps/v8/src/heap.h index 0f586e928..9c100fcf1 100644 --- a/deps/v8/src/heap.h +++ b/deps/v8/src/heap.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
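For illustration only (not part of this commit): the heap.cc hunks above change KeyedLookupCache::Hash/Lookup/Update from raw Map*/Name* pointers to Handle<Map>/Handle<Name>, with the dereference confined to a DisallowHeapAllocation scope inside the cache. A minimal sketch of what a call site looks like under the new signatures follows; the Isolate::keyed_lookup_cache() accessor and the helper name CacheFieldOffset are assumptions made for the example, not taken from the diff.

    // Hypothetical call site for the handlified KeyedLookupCache API.
    // Lookup() returns -1 when no field offset is cached for (map, name).
    void CacheFieldOffset(Isolate* isolate,
                          Handle<Map> map,
                          Handle<Name> name,
                          int field_offset) {
      KeyedLookupCache* cache = isolate->keyed_lookup_cache();  // assumed accessor
      if (cache->Lookup(map, name) == -1) {
        // Update() internalizes non-unique names and dereferences the handles
        // internally, per the heap.cc hunk above.
        cache->Update(map, name, field_offset);
      }
    }
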
#ifndef V8_HEAP_H_ #define V8_HEAP_H_ @@ -32,6 +9,7 @@ #include "allocation.h" #include "assert-scope.h" +#include "counters.h" #include "globals.h" #include "incremental-marking.h" #include "list.h" @@ -40,7 +18,6 @@ #include "spaces.h" #include "splay-tree-inl.h" #include "store-buffer.h" -#include "v8-counters.h" #include "v8globals.h" namespace v8 { @@ -60,6 +37,7 @@ namespace internal { V(Oddball, true_value, TrueValue) \ V(Oddball, false_value, FalseValue) \ V(Oddball, uninitialized_value, UninitializedValue) \ + V(Oddball, exception, Exception) \ V(Map, cell_map, CellMap) \ V(Map, global_property_cell_map, GlobalPropertyCellMap) \ V(Map, shared_function_info_map, SharedFunctionInfoMap) \ @@ -72,8 +50,9 @@ namespace internal { V(Map, fixed_cow_array_map, FixedCOWArrayMap) \ V(Map, fixed_double_array_map, FixedDoubleArrayMap) \ V(Map, constant_pool_array_map, ConstantPoolArrayMap) \ - V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \ + V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \ V(Map, hash_table_map, HashTableMap) \ + V(Map, ordered_hash_table_map, OrderedHashTableMap) \ V(FixedArray, empty_fixed_array, EmptyFixedArray) \ V(ByteArray, empty_byte_array, EmptyByteArray) \ V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \ @@ -89,7 +68,7 @@ namespace internal { V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \ V(FixedArray, string_split_cache, StringSplitCache) \ V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \ - V(Object, termination_exception, TerminationException) \ + V(Oddball, termination_exception, TerminationException) \ V(Smi, hash_seed, HashSeed) \ V(Map, symbol_map, SymbolMap) \ V(Map, string_map, StringMap) \ @@ -109,8 +88,6 @@ namespace internal { ShortExternalStringWithOneByteDataMap) \ V(Map, internalized_string_map, InternalizedStringMap) \ V(Map, ascii_internalized_string_map, AsciiInternalizedStringMap) \ - V(Map, cons_internalized_string_map, ConsInternalizedStringMap) \ - V(Map, cons_ascii_internalized_string_map, ConsAsciiInternalizedStringMap) \ V(Map, \ external_internalized_string_map, \ ExternalInternalizedStringMap) \ @@ -181,7 +158,15 @@ namespace internal { V(Map, block_context_map, BlockContextMap) \ V(Map, module_context_map, ModuleContextMap) \ V(Map, global_context_map, GlobalContextMap) \ - V(Map, oddball_map, OddballMap) \ + V(Map, undefined_map, UndefinedMap) \ + V(Map, the_hole_map, TheHoleMap) \ + V(Map, null_map, NullMap) \ + V(Map, boolean_map, BooleanMap) \ + V(Map, uninitialized_map, UninitializedMap) \ + V(Map, arguments_marker_map, ArgumentsMarkerMap) \ + V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap) \ + V(Map, exception_map, ExceptionMap) \ + V(Map, termination_exception_map, TerminationExceptionMap) \ V(Map, message_object_map, JSMessageObjectMap) \ V(Map, foreign_map, ForeignMap) \ V(HeapNumber, nan_value, NanValue) \ @@ -255,6 +240,7 @@ namespace internal { V(constant_pool_array_map) \ V(no_interceptor_result_sentinel) \ V(hash_table_map) \ + V(ordered_hash_table_map) \ V(empty_fixed_array) \ V(empty_byte_array) \ V(empty_descriptor_array) \ @@ -268,7 +254,11 @@ namespace internal { V(block_context_map) \ V(module_context_map) \ V(global_context_map) \ - V(oddball_map) \ + V(undefined_map) \ + V(the_hole_map) \ + V(null_map) \ + V(boolean_map) \ + V(uninitialized_map) \ V(message_object_map) \ V(foreign_map) \ V(neander_map) @@ -337,7 +327,6 @@ namespace internal { "KeyedStoreElementMonomorphic") \ 
V(stack_overflow_string, "kStackOverflowBoilerplate") \ V(illegal_access_string, "illegal access") \ - V(illegal_execution_state_string, "illegal execution state") \ V(get_string, "get") \ V(set_string, "set") \ V(map_field_string, "%map") \ @@ -349,11 +338,7 @@ namespace internal { V(MakeReferenceError_string, "MakeReferenceError") \ V(MakeSyntaxError_string, "MakeSyntaxError") \ V(MakeTypeError_string, "MakeTypeError") \ - V(illegal_return_string, "illegal_return") \ - V(illegal_break_string, "illegal_break") \ - V(illegal_continue_string, "illegal_continue") \ V(unknown_label_string, "unknown_label") \ - V(redeclaration_string, "redeclaration") \ V(space_string, " ") \ V(exec_string, "exec") \ V(zero_string, "0") \ @@ -376,7 +361,9 @@ namespace internal { V(next_string, "next") \ V(byte_length_string, "byteLength") \ V(byte_offset_string, "byteOffset") \ - V(buffer_string, "buffer") + V(buffer_string, "buffer") \ + V(intl_initialized_marker_string, "v8::intl_initialized_marker") \ + V(intl_impl_object_string, "v8::intl_object") // Forward declarations. class GCTracer; @@ -562,8 +549,9 @@ class Heap { // Configure heap size before setup. Return false if the heap has been // set up already. bool ConfigureHeap(int max_semispace_size, - intptr_t max_old_gen_size, - intptr_t max_executable_size); + intptr_t max_old_space_size, + intptr_t max_executable_size, + intptr_t code_range_size); bool ConfigureHeapDefault(); // Prepares the heap, setting up memory areas that are needed in the isolate @@ -694,145 +682,11 @@ class Heap { return old_data_space_->allocation_limit_address(); } - // Allocates and initializes a new JavaScript object based on a - // constructor. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // If allocation_site is non-null, then a memento is emitted after the object - // that points to the site. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateJSObject( - JSFunction* constructor, - PretenureFlag pretenure = NOT_TENURED, - AllocationSite* allocation_site = NULL); - - MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context, - ScopeInfo* scope_info); - - // Allocate a JSArray with no elements - MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray( - ElementsKind elements_kind, - PretenureFlag pretenure = NOT_TENURED) { - return AllocateJSArrayAndStorage(elements_kind, 0, 0, - DONT_INITIALIZE_ARRAY_ELEMENTS, - pretenure); - } - - // Allocate a JSArray with a specified length but elements that are left - // uninitialized. - MUST_USE_RESULT MaybeObject* AllocateJSArrayAndStorage( - ElementsKind elements_kind, - int length, - int capacity, - ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS, - PretenureFlag pretenure = NOT_TENURED); - - MUST_USE_RESULT MaybeObject* AllocateJSArrayStorage( - JSArray* array, - int length, - int capacity, - ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS); - - // Allocate a JSArray with no elements - MUST_USE_RESULT MaybeObject* AllocateJSArrayWithElements( - FixedArrayBase* array_base, - ElementsKind elements_kind, - int length, - PretenureFlag pretenure = NOT_TENURED); - // Returns a deep copy of the JavaScript object. // Properties and elements are copied too. - // Returns failure if allocation failed. // Optionally takes an AllocationSite to be appended in an AllocationMemento. - MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source, - AllocationSite* site = NULL); - - // Allocates a JS ArrayBuffer object. 
- // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateJSArrayBuffer(); - - // Allocates a Harmony proxy or function proxy. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateJSProxy(Object* handler, - Object* prototype); - - MUST_USE_RESULT MaybeObject* AllocateJSFunctionProxy(Object* handler, - Object* call_trap, - Object* construct_trap, - Object* prototype); - - // Reinitialize a JSReceiver into an (empty) JS object of respective type and - // size, but keeping the original prototype. The receiver must have at least - // the size of the new object. The object is reinitialized and behaves as an - // object that has been freshly allocated. - // Returns failure if an error occured, otherwise object. - MUST_USE_RESULT MaybeObject* ReinitializeJSReceiver(JSReceiver* object, - InstanceType type, - int size); - - // Reinitialize an JSGlobalProxy based on a constructor. The object - // must have the same size as objects allocated using the - // constructor. The object is reinitialized and behaves as an - // object that has been freshly allocated using the constructor. - MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy( - JSFunction* constructor, JSGlobalProxy* global); - - // Allocates and initializes a new JavaScript object based on a map. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Passing an allocation site means that a memento will be created that - // points to the site. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap( - Map* map, - PretenureFlag pretenure = NOT_TENURED, - bool alloc_props = true, - AllocationSite* allocation_site = NULL); - - // Allocates a heap object based on the map. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this function does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space, - AllocationSite* allocation_site = NULL); - - // Allocates a JS Map in the heap. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this function does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateMap( - InstanceType instance_type, - int instance_size, - ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND); - - // Allocates a partial map for bootstrapping. - MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type, - int instance_size); - - // Allocates an empty code cache. - MUST_USE_RESULT MaybeObject* AllocateCodeCache(); - - // Allocates a serialized scope info. - MUST_USE_RESULT MaybeObject* AllocateScopeInfo(int length); - - // Allocates an External object for v8's external API. - MUST_USE_RESULT MaybeObject* AllocateExternal(void* value); - - // Allocates an empty PolymorphicCodeCache. - MUST_USE_RESULT MaybeObject* AllocatePolymorphicCodeCache(); - - // Allocates a pre-tenured empty AccessorPair. - MUST_USE_RESULT MaybeObject* AllocateAccessorPair(); - - // Allocates an empty TypeFeedbackInfo. - MUST_USE_RESULT MaybeObject* AllocateTypeFeedbackInfo(); - - // Allocates an AliasedArgumentsEntry. 
- MUST_USE_RESULT MaybeObject* AllocateAliasedArgumentsEntry(int slot); + MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source, + AllocationSite* site = NULL); // Clear the Instanceof cache (used when a prototype changes). inline void ClearInstanceofCache(); @@ -843,257 +697,13 @@ class Heap { // For use during bootup. void RepairFreeListsAfterBoot(); - // Allocates and fully initializes a String. There are two String - // encodings: ASCII and two byte. One should choose between the three string - // allocation functions based on the encoding of the string buffer used to - // initialized the string. - // - ...FromAscii initializes the string from a buffer that is ASCII - // encoded (it does not check that the buffer is ASCII encoded) and the - // result will be ASCII encoded. - // - ...FromUTF8 initializes the string from a buffer that is UTF-8 - // encoded. If the characters are all single-byte characters, the - // result will be ASCII encoded, otherwise it will converted to two - // byte. - // - ...FromTwoByte initializes the string from a buffer that is two-byte - // encoded. If the characters are all single-byte characters, the - // result will be converted to ASCII, otherwise it will be left as - // two-byte. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateStringFromOneByte( - Vector<const uint8_t> str, - PretenureFlag pretenure = NOT_TENURED); - // TODO(dcarney): remove this function. - MUST_USE_RESULT inline MaybeObject* AllocateStringFromOneByte( - Vector<const char> str, - PretenureFlag pretenure = NOT_TENURED) { - return AllocateStringFromOneByte(Vector<const uint8_t>::cast(str), - pretenure); - } - MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8( - Vector<const char> str, - PretenureFlag pretenure = NOT_TENURED); - MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow( - Vector<const char> str, - int non_ascii_start, - PretenureFlag pretenure = NOT_TENURED); - MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte( - Vector<const uc16> str, - PretenureFlag pretenure = NOT_TENURED); - - // Allocates an internalized string in old space based on the character - // stream. Returns Failure::RetryAfterGC(requested_bytes, space) if the - // allocation failed. - // Please note this function does not perform a garbage collection. - MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringFromUtf8( - Vector<const char> str, - int chars, - uint32_t hash_field); - - MUST_USE_RESULT inline MaybeObject* AllocateOneByteInternalizedString( - Vector<const uint8_t> str, - uint32_t hash_field); - - MUST_USE_RESULT inline MaybeObject* AllocateTwoByteInternalizedString( - Vector<const uc16> str, - uint32_t hash_field); - template<typename T> static inline bool IsOneByte(T t, int chars); - template<typename T> - MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringImpl( - T t, int chars, uint32_t hash_field); - - template<bool is_one_byte, typename T> - MUST_USE_RESULT MaybeObject* AllocateInternalizedStringImpl( - T t, int chars, uint32_t hash_field); - - // Allocates and partially initializes a String. There are two String - // encodings: ASCII and two byte. These functions allocate a string of the - // given length and set its map and length fields. The characters of the - // string are uninitialized. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. 
- // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateRawOneByteString( - int length, - PretenureFlag pretenure = NOT_TENURED); - MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString( - int length, - PretenureFlag pretenure = NOT_TENURED); - - // Computes a single character string where the character has code. - // A cache is used for ASCII codes. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode( - uint16_t code); - - // Allocate a byte array of the specified length - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateByteArray( - int length, - PretenureFlag pretenure = NOT_TENURED); - - // Allocates an external array of the specified length and type. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateExternalArray( - int length, - ExternalArrayType array_type, - void* external_pointer, - PretenureFlag pretenure); - - // Allocates a fixed typed array of the specified length and type. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateFixedTypedArray( - int length, - ExternalArrayType array_type, - PretenureFlag pretenure); - - // Allocate a symbol in old space. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateSymbol(); - MUST_USE_RESULT MaybeObject* AllocatePrivateSymbol(); - - // Allocate a tenured AllocationSite. It's payload is null - MUST_USE_RESULT MaybeObject* AllocateAllocationSite(); - - // Allocates a fixed array initialized with undefined values - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateFixedArray( - int length, - PretenureFlag pretenure = NOT_TENURED); - - // Allocates an uninitialized fixed array. It must be filled by the caller. - // - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length); - // Move len elements within a given array from src_index index to dst_index // index. void MoveElements(FixedArray* array, int dst_index, int src_index, int len); - // Make a copy of src and return it. Returns - // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src); - - // Make a copy of src and return it. Returns - // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT MaybeObject* CopyAndTenureFixedCOWArray(FixedArray* src); - - // Make a copy of src, set the map, and return the copy. Returns - // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map); - - // Make a copy of src and return it. 
Returns - // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray( - FixedDoubleArray* src); - - // Make a copy of src, set the map, and return the copy. Returns - // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap( - FixedDoubleArray* src, Map* map); - - // Make a copy of src and return it. Returns - // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT inline MaybeObject* CopyConstantPoolArray( - ConstantPoolArray* src); - - // Make a copy of src, set the map, and return the copy. Returns - // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT MaybeObject* CopyConstantPoolArrayWithMap( - ConstantPoolArray* src, Map* map); - - // Allocates a fixed array initialized with the hole values. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles( - int length, - PretenureFlag pretenure = NOT_TENURED); - - MUST_USE_RESULT MaybeObject* AllocateConstantPoolArray( - int number_of_int64_entries, - int number_of_code_ptr_entries, - int number_of_heap_ptr_entries, - int number_of_int32_entries); - - // Allocates a fixed double array with uninitialized values. Returns - // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedDoubleArray( - int length, - PretenureFlag pretenure = NOT_TENURED); - - // Allocates a fixed double array with hole values. Returns - // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateFixedDoubleArrayWithHoles( - int length, - PretenureFlag pretenure = NOT_TENURED); - - // AllocateHashTable is identical to AllocateFixedArray except - // that the resulting object has hash_table_map as map. - MUST_USE_RESULT MaybeObject* AllocateHashTable( - int length, PretenureFlag pretenure = NOT_TENURED); - - // Allocate a native (but otherwise uninitialized) context. - MUST_USE_RESULT MaybeObject* AllocateNativeContext(); - - // Allocate a global context. - MUST_USE_RESULT MaybeObject* AllocateGlobalContext(JSFunction* function, - ScopeInfo* scope_info); - - // Allocate a module context. - MUST_USE_RESULT MaybeObject* AllocateModuleContext(ScopeInfo* scope_info); - - // Allocate a function context. - MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length, - JSFunction* function); - - // Allocate a catch context. - MUST_USE_RESULT MaybeObject* AllocateCatchContext(JSFunction* function, - Context* previous, - String* name, - Object* thrown_object); - // Allocate a 'with' context. - MUST_USE_RESULT MaybeObject* AllocateWithContext(JSFunction* function, - Context* previous, - JSReceiver* extension); - - // Allocate a block context. - MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function, - Context* previous, - ScopeInfo* info); - - // Allocates a new utility object in the old generation. - MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type); - - // Allocates a function initialized with a shared part. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. 
- // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateFunction( - Map* function_map, - SharedFunctionInfo* shared, - Object* prototype, - PretenureFlag pretenure = TENURED); - // Sloppy mode arguments object size. static const int kSloppyArgumentsObjectSize = JSObject::kHeaderSize + 2 * kPointerSize; @@ -1105,84 +715,10 @@ class Heap { // callee is only valid in sloppy mode. static const int kArgumentsCalleeIndex = 1; - // Allocates an arguments object - optionally with an elements array. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateArgumentsObject( - Object* callee, int length); - - // Same as NewNumberFromDouble, but may return a preallocated/immutable - // number object (e.g., minus_zero_value_, nan_value_) - MUST_USE_RESULT MaybeObject* NumberFromDouble( - double value, PretenureFlag pretenure = NOT_TENURED); - - // Allocated a HeapNumber from value. - MUST_USE_RESULT MaybeObject* AllocateHeapNumber( - double value, PretenureFlag pretenure = NOT_TENURED); - - // Converts an int into either a Smi or a HeapNumber object. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT inline MaybeObject* NumberFromInt32( - int32_t value, PretenureFlag pretenure = NOT_TENURED); - - // Converts an int into either a Smi or a HeapNumber object. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT inline MaybeObject* NumberFromUint32( - uint32_t value, PretenureFlag pretenure = NOT_TENURED); - - // Allocates a new foreign object. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateForeign( - Address address, PretenureFlag pretenure = NOT_TENURED); - - // Allocates a new SharedFunctionInfo object. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name); - - // Allocates a new JSMessageObject object. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note that this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateJSMessageObject( - String* type, - JSArray* arguments, - int start_position, - int end_position, - Object* script, - Object* stack_frames); - - // Allocate a new external string object, which is backed by a string - // resource that resides outside the V8 heap. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii( - const ExternalAsciiString::Resource* resource); - MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte( - const ExternalTwoByteString::Resource* resource); - // Finalizes an external string by deleting the associated external // data and clearing the resource pointer. inline void FinalizeExternalString(String* string); - // Allocates an uninitialized object. The memory is non-executable if the - // hardware and OS allow. 
- // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this function does not perform a garbage collection. - MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes, - AllocationSpace space, - AllocationSpace retry_space); - // Initialize a filler object to keep the ability to iterate over the heap // when shortening objects. void CreateFillerObjectAt(Address addr, int size); @@ -1194,55 +730,6 @@ class Heap { // Maintain marking consistency for IncrementalMarking. void AdjustLiveBytes(Address address, int by, InvocationMode mode); - // Makes a new native code object - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. On success, the pointer to the Code object is stored in the - // self_reference. This allows generated code to reference its own Code - // object by containing this pointer. - // Please note this function does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* CreateCode( - const CodeDesc& desc, - Code::Flags flags, - Handle<Object> self_reference, - bool immovable = false, - bool crankshafted = false, - int prologue_offset = Code::kPrologueOffsetNotSet); - - MUST_USE_RESULT MaybeObject* CopyCode(Code* code); - - // Copy the code and scope info part of the code object, but insert - // the provided data as the relocation information. - MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info); - - // Finds the internalized copy for string in the string table. - // If not found, a new string is added to the table and returned. - // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation - // failed. - // Please note this function does not perform a garbage collection. - MUST_USE_RESULT MaybeObject* InternalizeUtf8String(const char* str) { - return InternalizeUtf8String(CStrVector(str)); - } - MUST_USE_RESULT MaybeObject* InternalizeUtf8String(Vector<const char> str); - - MUST_USE_RESULT MaybeObject* InternalizeString(String* str); - MUST_USE_RESULT MaybeObject* InternalizeStringWithKey(HashTableKey* key); - - bool InternalizeStringIfExists(String* str, String** result); - bool InternalizeTwoCharsStringIfExists(String* str, String** result); - - // Compute the matching internalized string map for a string if possible. - // NULL is returned if string is in new space or not flattened. - Map* InternalizedStringMapForString(String* str); - - // Tries to flatten a string before compare operation. - // - // Returns a failure in case it was decided that flattening was - // necessary and failed. Note, if flattening is not necessary the - // string might stay non-flat even when not a failure is returned. - // - // Please note this function does not perform a garbage collection. - MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str); - // Converts the given boolean condition to JavaScript boolean value. inline Object* ToBoolean(bool condition); @@ -1301,11 +788,6 @@ class Heap { PromotionQueue* promotion_queue() { return &promotion_queue_; } -#ifdef DEBUG - // Utility used with flag gc-greedy. 
- void GarbageCollectionGreedyCheck(); -#endif - void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback, GCType gc_type_filter, bool pass_isolate = true); @@ -1531,6 +1013,10 @@ class Heap { RECORD_SCRATCHPAD_SLOT }; + // If an object has an AllocationMemento trailing it, return it, otherwise + // return NULL; + inline AllocationMemento* FindAllocationMemento(HeapObject* object); + // An object may have an AllocationSite associated with it through a trailing // AllocationMemento. Its feedback should be updated when objects are found // in the heap. @@ -1545,14 +1031,7 @@ class Heap { // Support for the API. // - bool CreateApiObjects(); - - // Attempt to find the number in a small cache. If we finds it, return - // the string representation of the number. Otherwise return undefined. - Object* GetNumberStringCache(Object* number); - - // Update the cache with a new number-string pair. - void SetNumberStringCache(Object* number, String* str); + void CreateApiObjects(); // Adjusts the amount of registered external memory. // Returns the adjusted value. @@ -1593,11 +1072,31 @@ class Heap { static const intptr_t kMinimumOldGenerationAllocationLimit = 8 * (Page::kPageSize > MB ? Page::kPageSize : MB); + static const int kLumpOfMemory = (i::kPointerSize / 4) * i::MB; + + // The new space size has to be a power of 2. + static const int kMaxNewSpaceSizeLowMemoryDevice = 2 * kLumpOfMemory; + static const int kMaxNewSpaceSizeMediumMemoryDevice = 8 * kLumpOfMemory; + static const int kMaxNewSpaceSizeHighMemoryDevice = 16 * kLumpOfMemory; + static const int kMaxNewSpaceSizeHugeMemoryDevice = 16 * kLumpOfMemory; + + // The old space size has to be a multiple of Page::kPageSize. + static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kLumpOfMemory; + static const int kMaxOldSpaceSizeMediumMemoryDevice = 256 * kLumpOfMemory; + static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kLumpOfMemory; + static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kLumpOfMemory; + + // The executable size has to be a multiple of Page::kPageSize. + static const int kMaxExecutableSizeLowMemoryDevice = 128 * kLumpOfMemory; + static const int kMaxExecutableSizeMediumMemoryDevice = 256 * kLumpOfMemory; + static const int kMaxExecutableSizeHighMemoryDevice = 512 * kLumpOfMemory; + static const int kMaxExecutableSizeHugeMemoryDevice = 700 * kLumpOfMemory; + intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size) { - const int divisor = FLAG_stress_compaction ? 10 : 1; - intptr_t limit = - Max(old_gen_size + old_gen_size / divisor, - kMinimumOldGenerationAllocationLimit); + intptr_t limit = FLAG_stress_compaction + ? old_gen_size + old_gen_size / 10 + : old_gen_size * old_space_growing_factor_; + limit = Max(limit, kMinimumOldGenerationAllocationLimit); limit += new_space_.Capacity(); intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2; return Min(limit, halfway_to_the_max); @@ -1651,11 +1150,6 @@ class Heap { // Generated code can treat direct references to this root as constant. 
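For illustration only (not part of this commit): the OldGenerationAllocationLimit hunk above replaces the old additive growth step with a multiplicative old_space_growing_factor_ (set to 2 for medium-memory devices in the ConfigureHeap hunk earlier in this diff). Below is a standalone sketch of that arithmetic with the V8 members replaced by plain parameters and the FLAG_stress_compaction branch omitted; the function name is invented for the example.

    #include <algorithm>
    #include <cstdint>

    // Mirrors the non-stress-compaction path of OldGenerationAllocationLimit.
    int64_t OldGenLimit(int64_t old_gen_size,
                        int64_t max_old_generation_size,
                        int64_t new_space_capacity,
                        int growing_factor,       // old_space_growing_factor_
                        int64_t minimum_limit) {  // kMinimumOldGenerationAllocationLimit
      // Grow multiplicatively, but never below the configured minimum.
      int64_t limit = std::max(old_gen_size * growing_factor, minimum_limit);
      limit += new_space_capacity;
      // Never let the limit pass the halfway point to the hard maximum.
      int64_t halfway_to_the_max = (old_gen_size + max_old_generation_size) / 2;
      return std::min(limit, halfway_to_the_max);
    }

For example, with a 64 MB old generation, a 512 MB maximum, an 8 MB new space and a growing factor of 2, the limit works out to min(64 * 2 + 8, (64 + 512) / 2) = 136 MB.
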
bool RootCanBeTreatedAsConstant(RootListIndex root_index); - MUST_USE_RESULT MaybeObject* NumberToString( - Object* number, bool check_number_string_cache = true); - MUST_USE_RESULT MaybeObject* Uint32ToString( - uint32_t value, bool check_number_string_cache = true); - Map* MapForFixedTypedArray(ExternalArrayType array_type); RootListIndex RootIndexForFixedTypedArray( ExternalArrayType array_type); @@ -1782,20 +1276,6 @@ class Heap { return &incremental_marking_; } - bool IsSweepingComplete() { - return !mark_compact_collector()->IsConcurrentSweepingInProgress() && - old_data_space()->IsLazySweepingComplete() && - old_pointer_space()->IsLazySweepingComplete(); - } - - bool AdvanceSweepers(int step_size); - - bool EnsureSweepersProgressed(int step_size) { - bool sweeping_complete = old_data_space()->EnsureSweeperProgress(step_size); - sweeping_complete &= old_pointer_space()->EnsureSweeperProgress(step_size); - return sweeping_complete; - } - ExternalStringTable* external_string_table() { return &external_string_table_; } @@ -1932,9 +1412,10 @@ class Heap { Heap* heap_; }; - MaybeObject* AddWeakObjectToCodeDependency(Object* obj, DependentCode* dep); + void AddWeakObjectToCodeDependency(Handle<Object> obj, + Handle<DependentCode> dep); - DependentCode* LookupWeakObjectToCodeDependency(Object* obj); + DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj); void InitializeWeakObjectToCodeTable() { set_weak_object_to_code_table(undefined_value()); @@ -1945,6 +1426,58 @@ class Heap { static void FatalProcessOutOfMemory(const char* location, bool take_snapshot = false); + protected: + // Methods made available to tests. + + // Allocates a JS Map in the heap. + MUST_USE_RESULT AllocationResult AllocateMap( + InstanceType instance_type, + int instance_size, + ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND); + + // Allocates and initializes a new JavaScript object based on a + // constructor. + // If allocation_site is non-null, then a memento is emitted after the object + // that points to the site. + MUST_USE_RESULT AllocationResult AllocateJSObject( + JSFunction* constructor, + PretenureFlag pretenure = NOT_TENURED, + AllocationSite* allocation_site = NULL); + + // Allocates and initializes a new JavaScript object based on a map. + // Passing an allocation site means that a memento will be created that + // points to the site. + MUST_USE_RESULT AllocationResult AllocateJSObjectFromMap( + Map* map, + PretenureFlag pretenure = NOT_TENURED, + bool alloc_props = true, + AllocationSite* allocation_site = NULL); + + // Allocated a HeapNumber from value. + MUST_USE_RESULT AllocationResult AllocateHeapNumber( + double value, PretenureFlag pretenure = NOT_TENURED); + + // Allocate a byte array of the specified length + MUST_USE_RESULT AllocationResult AllocateByteArray( + int length, + PretenureFlag pretenure = NOT_TENURED); + + // Allocates an arguments object - optionally with an elements array. + MUST_USE_RESULT AllocationResult AllocateArgumentsObject( + Object* callee, int length); + + // Copy the code and scope info part of the code object, but insert + // the provided data as the relocation information. 
+ MUST_USE_RESULT AllocationResult CopyCode(Code* code, + Vector<byte> reloc_info); + + MUST_USE_RESULT AllocationResult CopyCode(Code* code); + + // Allocates a fixed array initialized with undefined values + MUST_USE_RESULT AllocationResult AllocateFixedArray( + int length, + PretenureFlag pretenure = NOT_TENURED); + private: Heap(); @@ -1962,6 +1495,11 @@ class Heap { intptr_t max_executable_size_; intptr_t maximum_committed_; + // The old space growing factor is used in the old space heap growing + // strategy. The new old space size is the current old space size times + // old_space_growing_factor_. + int old_space_growing_factor_; + // For keeping track of how much data has survived // scavenge since last new space expansion. int survived_since_last_expansion_; @@ -2188,17 +1726,25 @@ class Heap { return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE; } - // Allocate an uninitialized fixed array. - MUST_USE_RESULT MaybeObject* AllocateRawFixedArray( - int length, PretenureFlag pretenure); + // Allocate an uninitialized object. The memory is non-executable if the + // hardware and OS allow. This is the single choke-point for allocations + // performed by the runtime and should not be bypassed (to extend this to + // inlined allocations, use the Heap::DisableInlineAllocation() support). + MUST_USE_RESULT inline AllocationResult AllocateRaw( + int size_in_bytes, + AllocationSpace space, + AllocationSpace retry_space); - // Allocate an uninitialized fixed double array. - MUST_USE_RESULT MaybeObject* AllocateRawFixedDoubleArray( - int length, PretenureFlag pretenure); + // Allocates a heap object based on the map. + MUST_USE_RESULT AllocationResult Allocate( + Map* map, + AllocationSpace space, + AllocationSite* allocation_site = NULL); - // Allocate an initialized fixed array with the given filler value. - MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithFiller( - int length, PretenureFlag pretenure, Object* filler); + // Allocates a partial map for bootstrapping. + MUST_USE_RESULT AllocationResult AllocatePartialMap( + InstanceType instance_type, + int instance_size); // Initializes a JSObject based on its map. void InitializeJSObjectFromMap(JSObject* obj, @@ -2207,8 +1753,147 @@ class Heap { void InitializeAllocationMemento(AllocationMemento* memento, AllocationSite* allocation_site); + // Allocate a block of memory in the given space (filled with a filler). + // Used as a fall-back for generated code when the space is full. + MUST_USE_RESULT AllocationResult AllocateFillerObject(int size, + bool double_align, + AllocationSpace space); + + // Allocate an uninitialized fixed array. + MUST_USE_RESULT AllocationResult AllocateRawFixedArray( + int length, PretenureFlag pretenure); + + // Allocate an uninitialized fixed double array. + MUST_USE_RESULT AllocationResult AllocateRawFixedDoubleArray( + int length, PretenureFlag pretenure); + + // Allocate an initialized fixed array with the given filler value. + MUST_USE_RESULT AllocationResult AllocateFixedArrayWithFiller( + int length, PretenureFlag pretenure, Object* filler); + + // Allocate and partially initializes a String. There are two String + // encodings: ASCII and two byte. These functions allocate a string of the + // given length and set its map and length fields. The characters of the + // string are uninitialized. 
+ MUST_USE_RESULT AllocationResult AllocateRawOneByteString( + int length, PretenureFlag pretenure); + MUST_USE_RESULT AllocationResult AllocateRawTwoByteString( + int length, PretenureFlag pretenure); + + // Allocates and fully initializes a String. There are two String + // encodings: ASCII and two byte. One should choose between the three string + // allocation functions based on the encoding of the string buffer used to + // initialized the string. + // - ...FromAscii initializes the string from a buffer that is ASCII + // encoded (it does not check that the buffer is ASCII encoded) and the + // result will be ASCII encoded. + // - ...FromUTF8 initializes the string from a buffer that is UTF-8 + // encoded. If the characters are all single-byte characters, the + // result will be ASCII encoded, otherwise it will converted to two + // byte. + // - ...FromTwoByte initializes the string from a buffer that is two-byte + // encoded. If the characters are all single-byte characters, the + // result will be converted to ASCII, otherwise it will be left as + // two-byte. + MUST_USE_RESULT AllocationResult AllocateStringFromUtf8Slow( + Vector<const char> str, + int non_ascii_start, + PretenureFlag pretenure = NOT_TENURED); + MUST_USE_RESULT AllocationResult AllocateStringFromTwoByte( + Vector<const uc16> str, + PretenureFlag pretenure = NOT_TENURED); + bool CreateInitialMaps(); - bool CreateInitialObjects(); + void CreateInitialObjects(); + + // Allocates an internalized string in old space based on the character + // stream. + MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8( + Vector<const char> str, + int chars, + uint32_t hash_field); + + MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString( + Vector<const uint8_t> str, + uint32_t hash_field); + + MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString( + Vector<const uc16> str, + uint32_t hash_field); + + template<bool is_one_byte, typename T> + MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl( + T t, int chars, uint32_t hash_field); + + template<typename T> + MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl( + T t, int chars, uint32_t hash_field); + + // Allocates an uninitialized fixed array. It must be filled by the caller. + MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length); + + // Make a copy of src and return it. Returns + // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. + MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src); + + // Make a copy of src, set the map, and return the copy. Returns + // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. + MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap(FixedArray* src, + Map* map); + + // Make a copy of src and return it. Returns + // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. + MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray( + FixedDoubleArray* src); + + // Make a copy of src and return it. Returns + // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. + MUST_USE_RESULT inline AllocationResult CopyConstantPoolArray( + ConstantPoolArray* src); + + + // Computes a single character string where the character has code. + // A cache is used for ASCII codes. + MUST_USE_RESULT AllocationResult LookupSingleCharacterStringFromCode( + uint16_t code); + + // Allocate a symbol in old space. 
+ MUST_USE_RESULT AllocationResult AllocateSymbol(); + + // Make a copy of src, set the map, and return the copy. + MUST_USE_RESULT AllocationResult CopyConstantPoolArrayWithMap( + ConstantPoolArray* src, Map* map); + + MUST_USE_RESULT AllocationResult AllocateConstantPoolArray( + int number_of_int64_entries, + int number_of_code_ptr_entries, + int number_of_heap_ptr_entries, + int number_of_int32_entries); + + // Allocates an external array of the specified length and type. + MUST_USE_RESULT AllocationResult AllocateExternalArray( + int length, + ExternalArrayType array_type, + void* external_pointer, + PretenureFlag pretenure); + + // Allocates a fixed typed array of the specified length and type. + MUST_USE_RESULT AllocationResult AllocateFixedTypedArray( + int length, + ExternalArrayType array_type, + PretenureFlag pretenure); + + // Make a copy of src and return it. + MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src); + + // Make a copy of src, set the map, and return the copy. + MUST_USE_RESULT AllocationResult CopyFixedDoubleArrayWithMap( + FixedDoubleArray* src, Map* map); + + // Allocates a fixed double array with uninitialized values. Returns + MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray( + int length, + PretenureFlag pretenure = NOT_TENURED); // These five Create*EntryStub functions are here and forced to not be inlined // because of a gcc-4.4 bug that assigns wrong vtable entries. @@ -2217,41 +1902,39 @@ class Heap { void CreateFixedStubs(); - MUST_USE_RESULT MaybeObject* CreateOddball(const char* to_string, - Object* to_number, - byte kind); - - // Allocate a JSArray with no elements - MUST_USE_RESULT MaybeObject* AllocateJSArray( - ElementsKind elements_kind, - PretenureFlag pretenure = NOT_TENURED); - // Allocate empty fixed array. - MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray(); + MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray(); // Allocate empty external array of given type. - MUST_USE_RESULT MaybeObject* AllocateEmptyExternalArray( + MUST_USE_RESULT AllocationResult AllocateEmptyExternalArray( ExternalArrayType array_type); // Allocate empty fixed typed array of given type. - MUST_USE_RESULT MaybeObject* AllocateEmptyFixedTypedArray( + MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray( ExternalArrayType array_type); - // Allocate empty fixed double array. - MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray(); - // Allocate empty constant pool array. - MUST_USE_RESULT MaybeObject* AllocateEmptyConstantPoolArray(); + MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray(); // Allocate a tenured simple cell. - MUST_USE_RESULT MaybeObject* AllocateCell(Object* value); + MUST_USE_RESULT AllocationResult AllocateCell(Object* value); // Allocate a tenured JS global property cell initialized with the hole. - MUST_USE_RESULT MaybeObject* AllocatePropertyCell(); + MUST_USE_RESULT AllocationResult AllocatePropertyCell(); + + // Allocates a new utility object in the old generation. + MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type); + + // Allocates a new foreign object. + MUST_USE_RESULT AllocationResult AllocateForeign( + Address address, PretenureFlag pretenure = NOT_TENURED); + + MUST_USE_RESULT AllocationResult AllocateCode(int object_size, + bool immovable); - // Allocate Box. 
- MUST_USE_RESULT MaybeObject* AllocateBox(Object* value, - PretenureFlag pretenure); + MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key); + + MUST_USE_RESULT AllocationResult InternalizeString(String* str); // Performs a minor collection in new generation. void Scavenge(); @@ -2303,34 +1986,16 @@ class Heap { // Slow part of scavenge object. static void ScavengeObjectSlow(HeapObject** p, HeapObject* object); - // Initializes a function with a shared part and prototype. - // Note: this code was factored out of AllocateFunction such that - // other parts of the VM could use it. Specifically, a function that creates - // instances of type JS_FUNCTION_TYPE benefit from the use of this function. - // Please note this does not perform a garbage collection. - inline void InitializeFunction( - JSFunction* function, - SharedFunctionInfo* shared, - Object* prototype); - // Total RegExp code ever generated double total_regexp_code_generated_; GCTracer* tracer_; - // Allocates a small number to string cache. - MUST_USE_RESULT MaybeObject* AllocateInitialNumberStringCache(); // Creates and installs the full-sized number string cache. - void AllocateFullSizeNumberStringCache(); - // Get the length of the number to string cache based on the max semispace - // size. int FullSizeNumberStringCacheLength(); // Flush the number to string cache. void FlushNumberStringCache(); - // Allocates a fixed-size allocation sites scratchpad. - MUST_USE_RESULT MaybeObject* AllocateAllocationSitesScratchpad(); - // Sets used allocation sites entries to undefined. void FlushAllocationSitesScratchpad(); @@ -2746,10 +2411,10 @@ class HeapIterator BASE_EMBEDDED { class KeyedLookupCache { public: // Lookup field offset for (map, name). If absent, -1 is returned. - int Lookup(Map* map, Name* name); + int Lookup(Handle<Map> map, Handle<Name> name); // Update an element in the cache. - void Update(Map* map, Name* name, int field_offset); + void Update(Handle<Map> map, Handle<Name> name, int field_offset); // Clear the cache. void Clear(); @@ -2774,7 +2439,7 @@ class KeyedLookupCache { } } - static inline int Hash(Map* map, Name* name); + static inline int Hash(Handle<Map> map, Handle<Name> name); // Get the address of the keys and field_offsets arrays. Used in // generated code to perform cache lookups. @@ -3019,10 +2684,10 @@ class RegExpResultsCache { ResultsCacheType type); // Attempt to add value_array to the cache specified by type. On success, // value_array is turned into a COW-array. - static void Enter(Heap* heap, - String* key_string, - Object* key_pattern, - FixedArray* value_array, + static void Enter(Isolate* isolate, + Handle<String> key_string, + Handle<Object> key_pattern, + Handle<FixedArray> value_array, ResultsCacheType type); static void Clear(FixedArray* cache); static const int kRegExpResultsCacheSize = 0x100; diff --git a/deps/v8/src/hydrogen-alias-analysis.h b/deps/v8/src/hydrogen-alias-analysis.h index 21a54625f..10325d2ec 100644 --- a/deps/v8/src/hydrogen-alias-analysis.h +++ b/deps/v8/src/hydrogen-alias-analysis.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_ALIAS_ANALYSIS_H_ #define V8_HYDROGEN_ALIAS_ANALYSIS_H_ diff --git a/deps/v8/src/hydrogen-bce.cc b/deps/v8/src/hydrogen-bce.cc index dd71078a2..1f3f449a0 100644 --- a/deps/v8/src/hydrogen-bce.cc +++ b/deps/v8/src/hydrogen-bce.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-bce.h" diff --git a/deps/v8/src/hydrogen-bce.h b/deps/v8/src/hydrogen-bce.h index c55dea7b7..6e501701f 100644 --- a/deps/v8/src/hydrogen-bce.h +++ b/deps/v8/src/hydrogen-bce.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_BCE_H_ #define V8_HYDROGEN_BCE_H_ diff --git a/deps/v8/src/hydrogen-bch.cc b/deps/v8/src/hydrogen-bch.cc index a0a0fee71..e889c196f 100644 --- a/deps/v8/src/hydrogen-bch.cc +++ b/deps/v8/src/hydrogen-bch.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "hydrogen-bch.h" diff --git a/deps/v8/src/hydrogen-bch.h b/deps/v8/src/hydrogen-bch.h index a22dacdd4..28d540157 100644 --- a/deps/v8/src/hydrogen-bch.h +++ b/deps/v8/src/hydrogen-bch.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_BCH_H_ #define V8_HYDROGEN_BCH_H_ diff --git a/deps/v8/src/hydrogen-canonicalize.cc b/deps/v8/src/hydrogen-canonicalize.cc index d3f72e933..77f4a55eb 100644 --- a/deps/v8/src/hydrogen-canonicalize.cc +++ b/deps/v8/src/hydrogen-canonicalize.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-canonicalize.h" #include "hydrogen-redundant-phi.h" diff --git a/deps/v8/src/hydrogen-canonicalize.h b/deps/v8/src/hydrogen-canonicalize.h index d2b289bc2..a0737ed63 100644 --- a/deps/v8/src/hydrogen-canonicalize.h +++ b/deps/v8/src/hydrogen-canonicalize.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_CANONICALIZE_H_ #define V8_HYDROGEN_CANONICALIZE_H_ diff --git a/deps/v8/src/hydrogen-check-elimination.cc b/deps/v8/src/hydrogen-check-elimination.cc index 52a549299..701d9654b 100644 --- a/deps/v8/src/hydrogen-check-elimination.cc +++ b/deps/v8/src/hydrogen-check-elimination.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-check-elimination.h" #include "hydrogen-alias-analysis.h" @@ -44,7 +21,7 @@ namespace v8 { namespace internal { -typedef UniqueSet<Map>* MapSet; +typedef const UniqueSet<Map>* MapSet; struct HCheckTableEntry { HValue* object_; // The object being approximated. NULL => invalid entry. @@ -57,7 +34,7 @@ struct HCheckTableEntry { // set of known maps for each object. class HCheckTable : public ZoneObject { public: - static const int kMaxTrackedObjects = 10; + static const int kMaxTrackedObjects = 16; explicit HCheckTable(HCheckEliminationPhase* phase) : phase_(phase), @@ -72,10 +49,6 @@ class HCheckTable : public ZoneObject { ReduceCheckMaps(HCheckMaps::cast(instr)); break; } - case HValue::kCheckValue: { - ReduceCheckValue(HCheckValue::cast(instr)); - break; - } case HValue::kLoadNamedField: { ReduceLoadNamedField(HLoadNamedField::cast(instr)); break; @@ -97,17 +70,14 @@ class HCheckTable : public ZoneObject { HTransitionElementsKind::cast(instr)); break; } - case HValue::kCheckMapValue: { - ReduceCheckMapValue(HCheckMapValue::cast(instr)); - break; - } case HValue::kCheckHeapObject: { ReduceCheckHeapObject(HCheckHeapObject::cast(instr)); break; } default: { // If the instruction changes maps uncontrollably, drop everything. - if (instr->CheckChangesFlag(kMaps) || + if (instr->CheckChangesFlag(kElementsKind) || + instr->CheckChangesFlag(kMaps) || instr->CheckChangesFlag(kOsrEntries)) { Kill(); } @@ -154,13 +124,13 @@ class HCheckTable : public ZoneObject { private: // Copy state to successor block. HCheckTable* Copy(HBasicBlock* succ, HBasicBlock* from_block, Zone* zone) { - HCheckTable* copy = new(phase_->zone()) HCheckTable(phase_); + HCheckTable* copy = new(zone) HCheckTable(phase_); for (int i = 0; i < size_; i++) { HCheckTableEntry* old_entry = &entries_[i]; ASSERT(old_entry->maps_->size() > 0); HCheckTableEntry* new_entry = &copy->entries_[i]; new_entry->object_ = old_entry->object_; - new_entry->maps_ = old_entry->maps_->Copy(phase_->zone()); + new_entry->maps_ = old_entry->maps_; // Keep the check if the existing check's block dominates the successor. if (old_entry->check_ != NULL && old_entry->check_->block()->Dominates(succ)) { @@ -186,7 +156,7 @@ class HCheckTable : public ZoneObject { HCheckTableEntry* pred_entry = copy->Find(phi_operand); if (pred_entry != NULL) { // Create an entry for a phi in the table.
- copy->Insert(phi, NULL, pred_entry->maps_->Copy(phase_->zone())); + copy->Insert(phi, NULL, pred_entry->maps_); } } } @@ -206,15 +176,15 @@ class HCheckTable : public ZoneObject { if (entry == NULL) { copy->Insert(object, cmp, cmp->map()); } else { - MapSet list = new(phase_->zone()) UniqueSet<Map>(); - list->Add(cmp->map(), phase_->zone()); - entry->maps_ = list; + entry->maps_ = new(zone) UniqueSet<Map>(cmp->map(), zone); entry->check_ = cmp; } } else { // Learn on the false branch of if(CompareMap(x)). if (entry != NULL) { - entry->maps_->Remove(cmp->map()); + UniqueSet<Map>* maps = entry->maps_->Copy(zone); + maps->Remove(cmp->map()); + entry->maps_ = maps; } } learned = true; @@ -228,14 +198,12 @@ class HCheckTable : public ZoneObject { HCheckTableEntry* re = copy->Find(right); if (le == NULL) { if (re != NULL) { - copy->Insert(left, NULL, re->maps_->Copy(zone)); + copy->Insert(left, NULL, re->maps_); } } else if (re == NULL) { - copy->Insert(right, NULL, le->maps_->Copy(zone)); + copy->Insert(right, NULL, le->maps_); } else { - MapSet intersect = le->maps_->Intersect(re->maps_, zone); - le->maps_ = intersect; - re->maps_ = intersect->Copy(zone); + le->maps_ = re->maps_ = le->maps_->Intersect(re->maps_, zone); } learned = true; } @@ -281,7 +249,7 @@ class HCheckTable : public ZoneObject { compact = true; } else { this_entry->maps_ = - this_entry->maps_->Union(that_entry->maps_, phase_->zone()); + this_entry->maps_->Union(that_entry->maps_, zone); if (this_entry->check_ != that_entry->check_) { this_entry->check_ = NULL; } @@ -305,7 +273,7 @@ class HCheckTable : public ZoneObject { if (entry != NULL) { // entry found; MapSet a = entry->maps_; - MapSet i = instr->map_set().Copy(phase_->zone()); + const UniqueSet<Map>* i = instr->maps(); if (a->IsSubset(i)) { // The first check is more strict; the second is redundant. if (entry->check_ != NULL) { @@ -324,7 +292,8 @@ class HCheckTable : public ZoneObject { } return; } - MapSet intersection = i->Intersect(a, phase_->zone()); + HGraph* graph = instr->block()->graph(); + MapSet intersection = i->Intersect(a, graph->zone()); if (intersection->size() == 0) { // Intersection is empty; probably megamorphic, which is likely to // deopt anyway, so just leave things as they are. @@ -334,7 +303,6 @@ class HCheckTable : public ZoneObject { entry->maps_ = intersection; if (intersection->size() != i->size()) { // Narrow set of maps in the second check maps instruction. - HGraph* graph = instr->block()->graph(); if (entry->check_ != NULL && entry->check_->block() == instr->block() && entry->check_->IsCheckMaps()) { @@ -344,7 +312,7 @@ class HCheckTable : public ZoneObject { TRACE(("CheckMaps #%d at B%d narrowed\n", check->id(), check->block()->block_id())); // Update map set and ensure that the check is alive. - check->set_map_set(intersection, graph->zone()); + check->set_maps(intersection); check->ClearFlag(HValue::kIsDead); TRACE(("Replacing redundant CheckMaps #%d at B%d with #%d\n", instr->id(), instr->block()->block_id(), entry->check_->id())); @@ -352,7 +320,7 @@ class HCheckTable : public ZoneObject { } else { TRACE(("CheckMaps #%d at B%d narrowed\n", instr->id(), instr->block()->block_id())); - instr->set_map_set(intersection, graph->zone()); + instr->set_maps(intersection); entry->check_ = instr; } @@ -364,25 +332,21 @@ class HCheckTable : public ZoneObject { } } else { // No entry; insert a new one. 
- Insert(object, instr, instr->map_set().Copy(phase_->zone())); - } - } - - void ReduceCheckValue(HCheckValue* instr) { - // Canonicalize HCheckValues; they might have their values load-eliminated. - HValue* value = instr->Canonicalize(); - if (value == NULL) { - instr->DeleteAndReplaceWith(instr->value()); - INC_STAT(removed_); - } else if (value != instr) { - instr->DeleteAndReplaceWith(value); - INC_STAT(redundant_); + Insert(object, instr, instr->maps()); } } void ReduceLoadNamedField(HLoadNamedField* instr) { // Reduce a load of the map field when it is known to be a constant. - if (!IsMapAccess(instr->access())) return; + if (!instr->access().IsMap()) { + // Check if we introduce field maps here. + MapSet maps = instr->maps(); + if (maps != NULL) { + ASSERT_NE(0, maps->size()); + Insert(instr, NULL, maps); + } + return; + } HValue* object = instr->object()->ActualValue(); MapSet maps = FindMaps(object); @@ -395,41 +359,6 @@ class HCheckTable : public ZoneObject { INC_STAT(loads_); } - void ReduceCheckMapValue(HCheckMapValue* instr) { - if (!instr->map()->IsConstant()) return; // Nothing to learn. - - HValue* object = instr->value()->ActualValue(); - // Match a HCheckMapValue(object, HConstant(map)) - Unique<Map> map = MapConstant(instr->map()); - - HCheckTableEntry* entry = Find(object); - if (entry != NULL) { - MapSet maps = entry->maps_; - if (maps->Contains(map)) { - if (maps->size() == 1) { - // Object is known to have exactly this map. - if (entry->check_ != NULL) { - instr->DeleteAndReplaceWith(entry->check_); - } else { - // Mark check as dead but leave it in the graph as a checkpoint for - // subsequent checks. - instr->SetFlag(HValue::kIsDead); - entry->check_ = instr; - } - INC_STAT(removed_); - } else { - // Only one map survives the check. - maps->Clear(); - maps->Add(map, phase_->zone()); - entry->check_ = instr; - } - } - } else { - // No prior information. - Insert(object, instr, map); - } - } - void ReduceCheckHeapObject(HCheckHeapObject* instr) { if (FindMaps(instr->value()->ActualValue()) != NULL) { // If the object has known maps, it's definitely a heap object. @@ -443,12 +372,12 @@ class HCheckTable : public ZoneObject { if (instr->has_transition()) { // This store transitions the object to a new map. Kill(object); - Insert(object, NULL, MapConstant(instr->transition())); - } else if (IsMapAccess(instr->access())) { + Insert(object, NULL, HConstant::cast(instr->transition())->MapValue()); + } else if (instr->access().IsMap()) { // This is a store directly to the map field of the object. Kill(object); if (!instr->value()->IsConstant()) return; - Insert(object, NULL, MapConstant(instr->value())); + Insert(object, NULL, HConstant::cast(instr->value())->MapValue()); } else { // If the instruction changes maps, it should be handled above. 
CHECK(!instr->CheckChangesFlag(kMaps)); @@ -488,7 +417,7 @@ class HCheckTable : public ZoneObject { if (maps_left == NULL) return; MapSet maps_right = FindMaps(instr->right()->ActualValue()); if (maps_right == NULL) return; - MapSet intersection = maps_left->Intersect(maps_right, phase_->zone()); + MapSet intersection = maps_left->Intersect(maps_right, zone()); if (intersection->size() > 0) return; TRACE(("Marking redundant CompareObjectEqAndBranch #%d at B%d as false\n", @@ -501,13 +430,15 @@ class HCheckTable : public ZoneObject { } void ReduceTransitionElementsKind(HTransitionElementsKind* instr) { - MapSet maps = FindMaps(instr->object()->ActualValue()); + HCheckTableEntry* entry = Find(instr->object()->ActualValue()); // Can only learn more about an object that already has a known set of maps. - if (maps == NULL) return; - if (maps->Contains(instr->original_map())) { + if (entry == NULL) return; + if (entry->maps_->Contains(instr->original_map())) { // If the object has the original map, it will be transitioned. + UniqueSet<Map>* maps = entry->maps_->Copy(zone()); maps->Remove(instr->original_map()); - maps->Add(instr->transitioned_map(), phase_->zone()); + maps->Add(instr->transitioned_map(), zone()); + entry->maps_ = maps; } else { // Object does not have the given map, thus the transition is redundant. instr->DeleteAndReplaceWith(instr->object()); @@ -608,9 +539,7 @@ class HCheckTable : public ZoneObject { } void Insert(HValue* object, HInstruction* check, Unique<Map> map) { - MapSet list = new(phase_->zone()) UniqueSet<Map>(); - list->Add(map, phase_->zone()); - Insert(object, check, list); + Insert(object, check, new(zone()) UniqueSet<Map>(map, zone())); } void Insert(HValue* object, HInstruction* check, MapSet maps) { @@ -623,13 +552,7 @@ class HCheckTable : public ZoneObject { if (size_ < kMaxTrackedObjects) size_++; } - bool IsMapAccess(HObjectAccess access) { - return access.IsInobject() && access.offset() == JSObject::kMapOffset; - } - - Unique<Map> MapConstant(HValue* value) { - return Unique<Map>::cast(HConstant::cast(value)->GetUnique()); - } + Zone* zone() const { return phase_->zone(); } friend class HCheckMapsEffects; friend class HCheckEliminationPhase; @@ -647,26 +570,28 @@ class HCheckTable : public ZoneObject { class HCheckMapsEffects : public ZoneObject { public: explicit HCheckMapsEffects(Zone* zone) - : maps_stored_(false), - stores_(5, zone) { } + : objects_(0, zone), maps_stored_(false) {} - inline bool Disabled() { - return false; // Effects are _not_ disabled. - } + // Effects are _not_ disabled. + inline bool Disabled() const { return false; } // Process a possibly side-effecting instruction. void Process(HInstruction* instr, Zone* zone) { switch (instr->opcode()) { case HValue::kStoreNamedField: { - stores_.Add(HStoreNamedField::cast(instr), zone); + HStoreNamedField* store = HStoreNamedField::cast(instr); + if (store->access().IsMap() && store->has_transition()) { + objects_.Add(store->object(), zone); + } break; } - case HValue::kOsrEntry: { - // Kill everything. Loads must not be hoisted past the OSR entry. - maps_stored_ = true; + case HValue::kTransitionElementsKind: { + objects_.Add(HTransitionElementsKind::cast(instr)->object(), zone); + break; } default: { maps_stored_ |= (instr->CheckChangesFlag(kMaps) | + instr->CheckChangesFlag(kOsrEntries) | instr->CheckChangesFlag(kElementsKind)); } } @@ -680,26 +605,23 @@ class HCheckMapsEffects : public ZoneObject { return; } - // Kill maps for each store contained in these effects. 
- for (int i = 0; i < stores_.length(); i++) { - HStoreNamedField* s = stores_[i]; - if (table->IsMapAccess(s->access()) || s->has_transition()) { - table->Kill(s->object()->ActualValue()); - } + // Kill maps for each object contained in these effects. + for (int i = 0; i < objects_.length(); ++i) { + table->Kill(objects_[i]->ActualValue()); } } // Union these effects with the other effects. void Union(HCheckMapsEffects* that, Zone* zone) { maps_stored_ |= that->maps_stored_; - for (int i = 0; i < that->stores_.length(); i++) { - stores_.Add(that->stores_[i], zone); + for (int i = 0; i < that->objects_.length(); ++i) { + objects_.Add(that->objects_[i], zone); } } private: + ZoneList<HValue*> objects_; bool maps_stored_ : 1; - ZoneList<HStoreNamedField*> stores_; }; diff --git a/deps/v8/src/hydrogen-check-elimination.h b/deps/v8/src/hydrogen-check-elimination.h index b429b1746..b38447b1e 100644 --- a/deps/v8/src/hydrogen-check-elimination.h +++ b/deps/v8/src/hydrogen-check-elimination.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_CHECK_ELIMINATION_H_ #define V8_HYDROGEN_CHECK_ELIMINATION_H_ diff --git a/deps/v8/src/hydrogen-dce.cc b/deps/v8/src/hydrogen-dce.cc index 3a2eac40e..be551efa8 100644 --- a/deps/v8/src/hydrogen-dce.cc +++ b/deps/v8/src/hydrogen-dce.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
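The hydrogen-check-elimination.cc hunks above make MapSet a pointer to a const set (typedef const UniqueSet<Map>* MapSet): table entries now share one immutable map set, Copy() aliases the published set instead of cloning it, and narrowing (e.g. the false branch of CompareMap) allocates a fresh copy before republishing. A minimal copy-on-write sketch of that discipline, using simplified stand-in types; Entry and MapLikeSet are illustrative, not the real HCheckTableEntry or UniqueSet<Map>, and zone allocation/ownership is elided:

    #include <set>

    // Stand-in for UniqueSet<Map>: treated as immutable once published.
    struct MapLikeSet {
      std::set<int> maps;
      MapLikeSet* Copy() const { return new MapLikeSet(*this); }  // zone allocation elided
      bool Contains(int m) const { return maps.count(m) != 0; }
      void Remove(int m) { maps.erase(m); }
    };

    // Stand-in for HCheckTableEntry: holds a shared, read-only set.
    struct Entry {
      const MapLikeSet* maps_;
    };

    // Successor copy: alias the published set instead of cloning it.
    Entry CopyForSuccessor(const Entry& e) { return Entry{e.maps_}; }

    // Learning on the false branch of if (CompareMap(x, m)): never mutate the
    // shared set in place; copy, remove, then republish.
    void LearnFalseBranch(Entry* entry, int map) {
      if (!entry->maps_->Contains(map)) return;
      MapLikeSet* narrowed = entry->maps_->Copy();
      narrowed->Remove(map);
      entry->maps_ = narrowed;  // other tables still see the old set
    }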
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-dce.h" #include "v8.h" diff --git a/deps/v8/src/hydrogen-dce.h b/deps/v8/src/hydrogen-dce.h index 2d73b380e..18cd755c9 100644 --- a/deps/v8/src/hydrogen-dce.h +++ b/deps/v8/src/hydrogen-dce.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_DCE_H_ #define V8_HYDROGEN_DCE_H_ diff --git a/deps/v8/src/hydrogen-dehoist.cc b/deps/v8/src/hydrogen-dehoist.cc index bdf2cfb25..44aeb4887 100644 --- a/deps/v8/src/hydrogen-dehoist.cc +++ b/deps/v8/src/hydrogen-dehoist.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-dehoist.h" diff --git a/deps/v8/src/hydrogen-dehoist.h b/deps/v8/src/hydrogen-dehoist.h index 140dc6e0e..930e29bdc 100644 --- a/deps/v8/src/hydrogen-dehoist.h +++ b/deps/v8/src/hydrogen-dehoist.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_HYDROGEN_DEHOIST_H_ #define V8_HYDROGEN_DEHOIST_H_ diff --git a/deps/v8/src/hydrogen-environment-liveness.cc b/deps/v8/src/hydrogen-environment-liveness.cc index d7501ac49..2dd1233de 100644 --- a/deps/v8/src/hydrogen-environment-liveness.cc +++ b/deps/v8/src/hydrogen-environment-liveness.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-environment-liveness.h" @@ -84,8 +61,8 @@ void HEnvironmentLivenessAnalysisPhase::ZapEnvironmentSlotsInSuccessors( } HSimulate* simulate = first_simulate_.at(successor_id); if (simulate == NULL) continue; - ASSERT(simulate->closure().is_identical_to( - block->last_environment()->closure())); + ASSERT(VerifyClosures(simulate->closure(), + block->last_environment()->closure())); ZapEnvironmentSlot(i, simulate); } } @@ -97,7 +74,7 @@ void HEnvironmentLivenessAnalysisPhase::ZapEnvironmentSlotsForInstruction( if (!marker->CheckFlag(HValue::kEndsLiveRange)) return; HSimulate* simulate = marker->next_simulate(); if (simulate != NULL) { - ASSERT(simulate->closure().is_identical_to(marker->closure())); + ASSERT(VerifyClosures(simulate->closure(), marker->closure())); ZapEnvironmentSlot(marker->index(), simulate); } } @@ -241,4 +218,14 @@ void HEnvironmentLivenessAnalysisPhase::Run() { } } + +#ifdef DEBUG +bool HEnvironmentLivenessAnalysisPhase::VerifyClosures( + Handle<JSFunction> a, Handle<JSFunction> b) { + Heap::RelocationLock for_heap_access(isolate()->heap()); + AllowHandleDereference for_verification; + return a.is_identical_to(b); +} +#endif + } } // namespace v8::internal diff --git a/deps/v8/src/hydrogen-environment-liveness.h b/deps/v8/src/hydrogen-environment-liveness.h index 248ec5ce5..c72cd0f33 100644 --- a/deps/v8/src/hydrogen-environment-liveness.h +++ b/deps/v8/src/hydrogen-environment-liveness.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_ENVIRONMENT_LIVENESS_H_ #define V8_HYDROGEN_ENVIRONMENT_LIVENESS_H_ @@ -55,6 +32,9 @@ class HEnvironmentLivenessAnalysisPhase : public HPhase { void ZapEnvironmentSlotsForInstruction(HEnvironmentMarker* marker); void UpdateLivenessAtBlockEnd(HBasicBlock* block, BitVector* live); void UpdateLivenessAtInstruction(HInstruction* instr, BitVector* live); +#ifdef DEBUG + bool VerifyClosures(Handle<JSFunction> a, Handle<JSFunction> b); +#endif int block_count_; diff --git a/deps/v8/src/hydrogen-escape-analysis.cc b/deps/v8/src/hydrogen-escape-analysis.cc index 102301992..8514d889f 100644 --- a/deps/v8/src/hydrogen-escape-analysis.cc +++ b/deps/v8/src/hydrogen-escape-analysis.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-escape-analysis.h" @@ -155,12 +132,29 @@ HValue* HEscapeAnalysisPhase::NewMapCheckAndInsert(HCapturedObject* state, // TODO(mstarzinger): This will narrow a map check against a set of maps // down to the first element in the set. Revisit and fix this. HCheckValue* check = HCheckValue::New( - zone, NULL, value, mapcheck->first_map(), false); + zone, NULL, value, mapcheck->maps()->at(0), false); check->InsertBefore(mapcheck); return check; } +// Replace a field load with a given value, forcing Smi representation if +// necessary. +HValue* HEscapeAnalysisPhase::NewLoadReplacement( + HLoadNamedField* load, HValue* load_value) { + HValue* replacement = load_value; + Representation representation = load->representation(); + if (representation.IsSmi()) { + Zone* zone = graph()->zone(); + HInstruction* new_instr = + HForceRepresentation::New(zone, NULL, load_value, representation); + new_instr->InsertAfter(load); + replacement = new_instr; + } + return replacement; +} + + // Performs a forward data-flow analysis of all loads and stores on the // given captured allocation. This uses a reverse post-order iteration // over affected basic blocks. All non-escaping instructions are handled @@ -196,10 +190,11 @@ void HEscapeAnalysisPhase::AnalyzeDataFlow(HInstruction* allocate) { int index = load->access().offset() / kPointerSize; if (load->object() != allocate) continue; ASSERT(load->access().IsInobject()); - HValue* replacement = state->OperandAt(index); + HValue* replacement = + NewLoadReplacement(load, state->OperandAt(index)); load->DeleteAndReplaceWith(replacement); if (FLAG_trace_escape_analysis) { - PrintF("Replacing load #%d with #%d (%s)\n", instr->id(), + PrintF("Replacing load #%d with #%d (%s)\n", load->id(), replacement->id(), replacement->Mnemonic()); } break; diff --git a/deps/v8/src/hydrogen-escape-analysis.h b/deps/v8/src/hydrogen-escape-analysis.h index 3e27cc1b4..5ff0b32d3 100644 --- a/deps/v8/src/hydrogen-escape-analysis.h +++ b/deps/v8/src/hydrogen-escape-analysis.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
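The hydrogen-escape-analysis.cc hunk above adds NewLoadReplacement: when a load from a captured object is replaced by the captured value, the replacement is wrapped in an HForceRepresentation if the load carried a Smi representation. A hedged sketch of that shape with simplified stand-ins; only the idea and the names HLoadNamedField/HForceRepresentation come from the hunk, everything else here is illustrative:

    // Sketch only, not the real Hydrogen classes.
    enum class Rep { kTagged, kSmi };

    struct Value {
      Rep rep = Rep::kTagged;
      virtual ~Value() = default;
    };

    struct Force : Value {               // plays the role of HForceRepresentation
      Value* input;
      Force(Value* v, Rep r) : input(v) { rep = r; }
    };

    // Replace a field load with the stored value, forcing Smi representation
    // when the load expected a Smi (mirrors NewLoadReplacement above).
    Value* NewLoadReplacement(Rep load_rep, Value* stored_value) {
      if (load_rep == Rep::kSmi) {
        return new Force(stored_value, Rep::kSmi);  // inserted after the load in the patch
      }
      return stored_value;
    }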
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_ESCAPE_ANALYSIS_H_ #define V8_HYDROGEN_ESCAPE_ANALYSIS_H_ @@ -62,6 +39,8 @@ class HEscapeAnalysisPhase : public HPhase { HValue* NewMapCheckAndInsert(HCapturedObject* state, HCheckMaps* mapcheck); + HValue* NewLoadReplacement(HLoadNamedField* load, HValue* load_value); + HCapturedObject* StateAt(HBasicBlock* block) { return block_states_.at(block->block_id()); } diff --git a/deps/v8/src/hydrogen-flow-engine.h b/deps/v8/src/hydrogen-flow-engine.h index 99a2f841a..04902b297 100644 --- a/deps/v8/src/hydrogen-flow-engine.h +++ b/deps/v8/src/hydrogen-flow-engine.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_FLOW_ENGINE_H_ #define V8_HYDROGEN_FLOW_ENGINE_H_ diff --git a/deps/v8/src/hydrogen-gvn.cc b/deps/v8/src/hydrogen-gvn.cc index 4c98015be..f9d1b408a 100644 --- a/deps/v8/src/hydrogen-gvn.cc +++ b/deps/v8/src/hydrogen-gvn.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen.h" #include "hydrogen-gvn.h" diff --git a/deps/v8/src/hydrogen-gvn.h b/deps/v8/src/hydrogen-gvn.h index d00dd0558..25a5c24da 100644 --- a/deps/v8/src/hydrogen-gvn.h +++ b/deps/v8/src/hydrogen-gvn.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_HYDROGEN_GVN_H_ #define V8_HYDROGEN_GVN_H_ diff --git a/deps/v8/src/hydrogen-infer-representation.cc b/deps/v8/src/hydrogen-infer-representation.cc index f61649a68..3e983476f 100644 --- a/deps/v8/src/hydrogen-infer-representation.cc +++ b/deps/v8/src/hydrogen-infer-representation.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-infer-representation.h" diff --git a/deps/v8/src/hydrogen-infer-representation.h b/deps/v8/src/hydrogen-infer-representation.h index 7c605696c..a2c8466f9 100644 --- a/deps/v8/src/hydrogen-infer-representation.h +++ b/deps/v8/src/hydrogen-infer-representation.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_INFER_REPRESENTATION_H_ #define V8_HYDROGEN_INFER_REPRESENTATION_H_ diff --git a/deps/v8/src/hydrogen-infer-types.cc b/deps/v8/src/hydrogen-infer-types.cc index 01c608473..7a3208b67 100644 --- a/deps/v8/src/hydrogen-infer-types.cc +++ b/deps/v8/src/hydrogen-infer-types.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-infer-types.h" diff --git a/deps/v8/src/hydrogen-infer-types.h b/deps/v8/src/hydrogen-infer-types.h index cfcbf3549..d1abcdac5 100644 --- a/deps/v8/src/hydrogen-infer-types.h +++ b/deps/v8/src/hydrogen-infer-types.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_INFER_TYPES_H_ #define V8_HYDROGEN_INFER_TYPES_H_ diff --git a/deps/v8/src/hydrogen-instructions.cc b/deps/v8/src/hydrogen-instructions.cc index 84dcb1824..f5c5c32f4 100644 --- a/deps/v8/src/hydrogen-instructions.cc +++ b/deps/v8/src/hydrogen-instructions.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -509,6 +486,8 @@ bool HValue::CanReplaceWithDummyUses() { !(block()->IsReachable() || IsBlockEntry() || IsControlInstruction() || + IsArgumentsObject() || + IsCapturedObject() || IsSimulate() || IsEnterInlined() || IsLeaveInlined()); @@ -588,17 +567,6 @@ void HValue::PrintTypeTo(StringStream* stream) { } -void HValue::PrintRangeTo(StringStream* stream) { - if (range() == NULL || range()->IsMostGeneric()) return; - // Note: The c1visualizer syntax for locals allows only a sequence of the - // following characters: A-Za-z0-9_-|: - stream->Add(" range:%d_%d%s", - range()->lower(), - range()->upper(), - range()->CanBeMinusZero() ? "_m0" : ""); -} - - void HValue::PrintChangesTo(StringStream* stream) { GVNFlagSet changes_flags = ChangesFlags(); if (changes_flags.IsEmpty()) return; @@ -699,7 +667,6 @@ void HSourcePosition::PrintTo(FILE* out) { void HInstruction::PrintTo(StringStream* stream) { PrintMnemonicTo(stream); PrintDataTo(stream); - PrintRangeTo(stream); PrintChangesTo(stream); PrintTypeTo(stream); if (CheckFlag(HValue::kHasNoObservableSideEffects)) { @@ -841,104 +808,133 @@ void HInstruction::Verify() { #endif -static bool HasPrimitiveRepresentation(HValue* instr) { - return instr->representation().IsInteger32() || - instr->representation().IsDouble(); -} - - bool HInstruction::CanDeoptimize() { // TODO(titzer): make this a virtual method? switch (opcode()) { + case HValue::kAbnormalExit: case HValue::kAccessArgumentsAt: - case HValue::kApplyArguments: + case HValue::kAllocate: case HValue::kArgumentsElements: case HValue::kArgumentsLength: case HValue::kArgumentsObject: + case HValue::kBlockEntry: case HValue::kBoundsCheckBaseIndexInformation: + case HValue::kCallFunction: + case HValue::kCallJSFunction: + case HValue::kCallNew: + case HValue::kCallNewArray: + case HValue::kCallStub: + case HValue::kCallWithDescriptor: case HValue::kCapturedObject: - case HValue::kClampToUint8: + case HValue::kClassOfTestAndBranch: + case HValue::kCompareGeneric: + case HValue::kCompareHoleAndBranch: + case HValue::kCompareMap: + case HValue::kCompareMinusZeroAndBranch: + case HValue::kCompareNumericAndBranch: + case HValue::kCompareObjectEqAndBranch: case HValue::kConstant: + case HValue::kConstructDouble: case HValue::kContext: - case HValue::kDateField: case HValue::kDebugBreak: case HValue::kDeclareGlobals: - case HValue::kDiv: + case HValue::kDoubleBits: case HValue::kDummyUse: case HValue::kEnterInlined: case HValue::kEnvironmentMarker: - case HValue::kForInCacheArray: - case HValue::kForInPrepareMap: - case HValue::kFunctionLiteral: + case HValue::kForceRepresentation: case HValue::kGetCachedArrayIndex: case HValue::kGoto: + case HValue::kHasCachedArrayIndexAndBranch: + case HValue::kHasInstanceTypeAndBranch: case HValue::kInnerAllocatedObject: case HValue::kInstanceOf: case HValue::kInstanceOfKnownGlobal: - case HValue::kInvokeFunction: + case HValue::kIsConstructCallAndBranch: + case HValue::kIsObjectAndBranch: + case HValue::kIsSmiAndBranch: + case HValue::kIsStringAndBranch: + case HValue::kIsUndetectableAndBranch: case HValue::kLeaveInlined: - case HValue::kLoadContextSlot: case HValue::kLoadFieldByIndex: - case HValue::kLoadFunctionPrototype: - case HValue::kLoadGlobalCell: case HValue::kLoadGlobalGeneric: - case HValue::kLoadKeyed: - case HValue::kLoadKeyedGeneric: case HValue::kLoadNamedField: case HValue::kLoadNamedGeneric: case HValue::kLoadRoot: case HValue::kMapEnumLength: - case HValue::kMathFloorOfDiv: case HValue::kMathMinMax: - case HValue::kMod: - case 
HValue::kMul: - case HValue::kOsrEntry: case HValue::kParameter: - case HValue::kPower: + case HValue::kPhi: case HValue::kPushArgument: + case HValue::kRegExpLiteral: + case HValue::kReturn: case HValue::kRor: case HValue::kSar: case HValue::kSeqStringGetChar: + case HValue::kStoreCodeEntry: + case HValue::kStoreKeyed: + case HValue::kStoreNamedGeneric: + case HValue::kStringCharCodeAt: + case HValue::kStringCharFromCode: + case HValue::kThisFunction: + case HValue::kTypeofIsAndBranch: + case HValue::kUnknownOSRValue: + case HValue::kUseConst: + return false; + + case HValue::kAdd: + case HValue::kApplyArguments: + case HValue::kBitwise: + case HValue::kBoundsCheck: + case HValue::kBranch: + case HValue::kCallRuntime: + case HValue::kChange: + case HValue::kCheckHeapObject: + case HValue::kCheckInstanceType: + case HValue::kCheckMapValue: + case HValue::kCheckMaps: + case HValue::kCheckSmi: + case HValue::kCheckValue: + case HValue::kClampToUint8: + case HValue::kDateField: + case HValue::kDeoptimize: + case HValue::kDiv: + case HValue::kForInCacheArray: + case HValue::kForInPrepareMap: + case HValue::kFunctionLiteral: + case HValue::kInvokeFunction: + case HValue::kLoadContextSlot: + case HValue::kLoadFunctionPrototype: + case HValue::kLoadGlobalCell: + case HValue::kLoadKeyed: + case HValue::kLoadKeyedGeneric: + case HValue::kMathFloorOfDiv: + case HValue::kMod: + case HValue::kMul: + case HValue::kOsrEntry: + case HValue::kPower: case HValue::kSeqStringSetChar: case HValue::kShl: case HValue::kShr: case HValue::kSimulate: case HValue::kStackCheck: - case HValue::kStoreCodeEntry: case HValue::kStoreContextSlot: case HValue::kStoreGlobalCell: - case HValue::kStoreKeyed: case HValue::kStoreKeyedGeneric: case HValue::kStoreNamedField: - case HValue::kStoreNamedGeneric: case HValue::kStringAdd: - case HValue::kStringCharCodeAt: - case HValue::kStringCharFromCode: + case HValue::kStringCompareAndBranch: case HValue::kSub: - case HValue::kThisFunction: case HValue::kToFastProperties: case HValue::kTransitionElementsKind: case HValue::kTrapAllocationMemento: case HValue::kTypeof: case HValue::kUnaryMathOperation: - case HValue::kUseConst: case HValue::kWrapReceiver: - return false; - case HValue::kForceRepresentation: - case HValue::kAdd: - case HValue::kBitwise: - case HValue::kChange: - case HValue::kCompareGeneric: - // These instructions might deoptimize if they are not primitive. 
- if (!HasPrimitiveRepresentation(this)) return true; - for (int i = 0; i < OperandCount(); i++) { - HValue* input = OperandAt(i); - if (!HasPrimitiveRepresentation(input)) return true; - } - return false; - default: return true; } + UNREACHABLE(); + return true; } @@ -1228,6 +1224,13 @@ bool HBranch::KnownSuccessorBlock(HBasicBlock** block) { } +void HBranch::PrintDataTo(StringStream* stream) { + HUnaryControlInstruction::PrintDataTo(stream); + stream->Add(" "); + expected_input_types().Print(stream); +} + + void HCompareMap::PrintDataTo(StringStream* stream) { value()->PrintNameTo(stream); stream->Add(" (%p)", *map().handle()); @@ -1377,6 +1380,17 @@ void HCheckMapValue::PrintDataTo(StringStream* stream) { } +HValue* HCheckMapValue::Canonicalize() { + if (map()->IsConstant()) { + HConstant* c_map = HConstant::cast(map()); + return HCheckMaps::CreateAndInsertAfter( + block()->graph()->zone(), value(), c_map->MapValue(), + c_map->HasStableMapValue(), this); + } + return this; +} + + void HForInPrepareMap::PrintDataTo(StringStream* stream) { enumerable()->PrintNameTo(stream); } @@ -1561,6 +1575,7 @@ void HChange::PrintDataTo(StringStream* stream) { HUnaryOperation::PrintDataTo(stream); stream->Add(" %s to %s", from().Mnemonic(), to().Mnemonic()); + if (CanTruncateToSmi()) stream->Add(" truncating-smi"); if (CanTruncateToInt32()) stream->Add(" truncating-int32"); if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?"); if (CheckFlag(kAllowUndefinedAsNaN)) stream->Add(" allow-undefined-as-nan"); @@ -1665,32 +1680,33 @@ void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) { } -bool HCheckMaps::HandleSideEffectDominator(GVNFlag side_effect, - HValue* dominator) { - ASSERT(side_effect == kMaps); - // TODO(mstarzinger): For now we specialize on HStoreNamedField, but once - // type information is rich enough we should generalize this to any HType - // for which the map is known. - if (HasNoUses() && dominator->IsStoreNamedField()) { - HStoreNamedField* store = HStoreNamedField::cast(dominator); - if (!store->has_transition() || store->object() != value()) return false; - HConstant* transition = HConstant::cast(store->transition()); - if (map_set_.Contains(Unique<Map>::cast(transition->GetUnique()))) { - DeleteAndReplaceWith(NULL); - return true; - } +void HCheckMaps::PrintDataTo(StringStream* stream) { + value()->PrintNameTo(stream); + stream->Add(" [%p", *maps()->at(0).handle()); + for (int i = 1; i < maps()->size(); ++i) { + stream->Add(",%p", *maps()->at(i).handle()); } - return false; + stream->Add("]%s", IsStabilityCheck() ? "(stability-check)" : ""); } -void HCheckMaps::PrintDataTo(StringStream* stream) { - value()->PrintNameTo(stream); - stream->Add(" [%p", *map_set_.at(0).handle()); - for (int i = 1; i < map_set_.size(); ++i) { - stream->Add(",%p", *map_set_.at(i).handle()); +HValue* HCheckMaps::Canonicalize() { + if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) { + HConstant* c_value = HConstant::cast(value()); + if (c_value->HasObjectMap()) { + for (int i = 0; i < maps()->size(); ++i) { + if (c_value->ObjectMap() == maps()->at(i)) { + if (maps()->size() > 1) { + set_maps(new(block()->graph()->zone()) UniqueSet<Map>( + maps()->at(i), block()->graph()->zone())); + } + MarkAsStabilityCheck(); + break; + } + } + } } - stream->Add("]%s", CanOmitMapChecks() ? 
"(omitted)" : ""); + return this; } @@ -2467,7 +2483,6 @@ void HPhi::PrintTo(StringStream* stream) { int32_non_phi_uses() + int32_indirect_uses(), double_non_phi_uses() + double_indirect_uses(), tagged_non_phi_uses() + tagged_indirect_uses()); - PrintRangeTo(stream); PrintTypeTo(stream); stream->Add("]"); } @@ -2677,26 +2692,32 @@ static bool IsInteger32(double value) { } -HConstant::HConstant(Handle<Object> handle, Representation r) - : HTemplateInstruction<0>(HType::TypeFromValue(handle)), - object_(Unique<Object>::CreateUninitialized(handle)), +HConstant::HConstant(Handle<Object> object, Representation r) + : HTemplateInstruction<0>(HType::TypeFromValue(object)), + object_(Unique<Object>::CreateUninitialized(object)), + object_map_(Handle<Map>::null()), + has_stable_map_value_(false), has_smi_value_(false), has_int32_value_(false), has_double_value_(false), has_external_reference_value_(false), is_not_in_new_space_(true), - boolean_value_(handle->BooleanValue()), + boolean_value_(object->BooleanValue()), is_undetectable_(false), instance_type_(kUnknownInstanceType) { - if (handle->IsHeapObject()) { - Handle<HeapObject> heap_obj = Handle<HeapObject>::cast(handle); - Heap* heap = heap_obj->GetHeap(); - is_not_in_new_space_ = !heap->InNewSpace(*handle); - instance_type_ = heap_obj->map()->instance_type(); - is_undetectable_ = heap_obj->map()->is_undetectable(); - } - if (handle->IsNumber()) { - double n = handle->Number(); + if (object->IsHeapObject()) { + Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object); + Isolate* isolate = heap_object->GetIsolate(); + Handle<Map> map(heap_object->map(), isolate); + is_not_in_new_space_ = !isolate->heap()->InNewSpace(*object); + instance_type_ = map->instance_type(); + is_undetectable_ = map->is_undetectable(); + if (map->is_stable()) object_map_ = Unique<Map>::CreateImmovable(map); + has_stable_map_value_ = (instance_type_ == MAP_TYPE && + Handle<Map>::cast(heap_object)->is_stable()); + } + if (object->IsNumber()) { + double n = object->Number(); has_int32_value_ = IsInteger32(n); int32_value_ = DoubleToInt32(n); has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_); @@ -2709,7 +2730,9 @@ HConstant::HConstant(Handle<Object> handle, Representation r) } -HConstant::HConstant(Unique<Object> unique, +HConstant::HConstant(Unique<Object> object, + Unique<Map> object_map, + bool has_stable_map_value, Representation r, HType type, bool is_not_in_new_space, @@ -2717,7 +2740,9 @@ HConstant::HConstant(Unique<Object> unique, bool is_undetectable, InstanceType instance_type) : HTemplateInstruction<0>(type), - object_(unique), + object_(object), + object_map_(object_map), + has_stable_map_value_(has_stable_map_value), has_smi_value_(false), has_int32_value_(false), has_double_value_(false), @@ -2726,7 +2751,7 @@ HConstant::HConstant(Unique<Object> unique, boolean_value_(boolean_value), is_undetectable_(is_undetectable), instance_type_(instance_type) { - ASSERT(!unique.handle().is_null()); + ASSERT(!object.handle().is_null()); ASSERT(!type.IsTaggedNumber()); Initialize(r); } @@ -2737,6 +2762,8 @@ HConstant::HConstant(int32_t integer_value, bool is_not_in_new_space, Unique<Object> object) : object_(object), + object_map_(Handle<Map>::null()), + has_stable_map_value_(false), has_smi_value_(Smi::IsValid(integer_value)), has_int32_value_(true), has_double_value_(true), @@ -2761,6 +2788,8 @@ HConstant::HConstant(double double_value, bool is_not_in_new_space, Unique<Object> object) : object_(object), + object_map_(Handle<Map>::null()), + 
has_stable_map_value_(false), has_int32_value_(IsInteger32(double_value)), has_double_value_(true), has_external_reference_value_(false), @@ -2783,6 +2812,8 @@ HConstant::HConstant(double double_value, HConstant::HConstant(ExternalReference reference) : HTemplateInstruction<0>(HType::None()), object_(Unique<Object>(Handle<Object>::null())), + object_map_(Handle<Map>::null()), + has_stable_map_value_(false), has_smi_value_(false), has_int32_value_(false), has_double_value_(false), @@ -2888,6 +2919,8 @@ HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const { } ASSERT(!object_.handle().is_null()); return new(zone) HConstant(object_, + object_map_, + has_stable_map_value_, r, type_, is_not_in_new_space_, @@ -2939,6 +2972,13 @@ void HConstant::PrintDataTo(StringStream* stream) { external_reference_value_.address())); } else { handle(Isolate::Current())->ShortPrint(stream); + stream->Add(" "); + if (HasStableMapValue()) { + stream->Add("[stable-map] "); + } + if (HasObjectMap()) { + stream->Add("[map %p] ", *ObjectMap().handle()); + } } if (!is_not_in_new_space_) { stream->Add("[new space] "); @@ -3284,6 +3324,21 @@ void HCompareHoleAndBranch::InferRepresentation( } +bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) { + if (left() == right() && + left()->representation().IsSmiOrInteger32()) { + *block = (token() == Token::EQ || + token() == Token::EQ_STRICT || + token() == Token::LTE || + token() == Token::GTE) + ? FirstSuccessor() : SecondSuccessor(); + return true; + } + *block = NULL; + return false; +} + + bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) { if (FLAG_fold_constants && value()->IsConstant()) { HConstant* constant = HConstant::cast(value()); @@ -3363,6 +3418,14 @@ void HLoadNamedField::PrintDataTo(StringStream* stream) { object()->PrintNameTo(stream); access_.PrintTo(stream); + if (maps() != NULL) { + stream->Add(" [%p", *maps()->at(0).handle()); + for (int i = 1; i < maps()->size(); ++i) { + stream->Add(",%p", *maps()->at(i).handle()); + } + stream->Add("]"); + } + if (HasDependency()) { stream->Add(" "); dependency()->PrintNameTo(stream); @@ -3370,28 +3433,6 @@ void HLoadNamedField::PrintDataTo(StringStream* stream) { } -HCheckMaps* HCheckMaps::New(Zone* zone, - HValue* context, - HValue* value, - Handle<Map> map, - CompilationInfo* info, - HValue* typecheck) { - HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck); - check_map->Add(map, zone); - if (map->CanOmitMapChecks() && - value->IsConstant() && - HConstant::cast(value)->HasMap(map)) { - // TODO(titzer): collect dependent map checks into a list. - check_map->omit_ = true; - if (map->CanTransition()) { - map->AddDependentCompilationInfo( - DependentCode::kPrototypeCheckGroup, info); - } - } - return check_map; -} - - void HLoadNamedGeneric::PrintDataTo(StringStream* stream) { object()->PrintNameTo(stream); stream->Add("."); @@ -3672,6 +3713,12 @@ HType HChange::CalculateInferredType() { Representation HUnaryMathOperation::RepresentationFromInputs() { + if (SupportsFlexibleFloorAndRound() && + (op_ == kMathFloor || op_ == kMathRound)) { + // Floor and Round always take a double input. The integral result can be + // used as an integer or a double. Infer the representation from the uses. + return Representation::None(); + } Representation rep = representation(); // If any of the actual input representation is more general than what we // have so far but not Tagged, use that representation instead. 
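The RepresentationFromInputs early-out above defers the result type of Math.floor/Math.round to the uses of the instruction; the RepresentationFromUses hunk added later in this same file then picks Double as soon as any use observes or requires a double, and falls back to Integer32 otherwise. Below is a minimal standalone sketch of that decision rule only, not V8 code: the Rep enum, Use struct, and the simplified RepresentationFromUses function are invented for illustration and stand in for V8's Representation class and use iterators.

// Sketch of the "prefer double if any use wants a double, else int32"
// rule from the hunk below; all types here are illustrative, not V8's.
#include <iostream>
#include <vector>

enum class Rep { kNone, kInteger32, kDouble };

struct Use {
  Rep observed;  // representation this use has been seen with
  Rep required;  // representation this use demands
};

Rep RepresentationFromUses(const std::vector<Use>& uses) {
  for (const Use& use : uses) {
    // One double use is enough to decide on a double result.
    if (use.observed == Rep::kDouble || use.required == Rep::kDouble) {
      return Rep::kDouble;
    }
  }
  return Rep::kInteger32;
}

int main() {
  std::vector<Use> uses = {{Rep::kInteger32, Rep::kNone},
                           {Rep::kDouble, Rep::kNone}};
  std::cout << (RepresentationFromUses(uses) == Rep::kDouble
                    ? "double\n"
                    : "int32\n");  // prints "double"
  return 0;
}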
@@ -3900,12 +3947,9 @@ void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) { HInnerAllocatedObject::New(zone, context(), dominating_allocate_, dominating_allocate_->size(), type()); free_space_instr->InsertBefore(this); - HConstant* filler_map = HConstant::New( - zone, - context(), - isolate()->factory()->free_space_map()); - filler_map->FinalizeUniqueness(); // TODO(titzer): should be init'd a'ready - filler_map->InsertAfter(free_space_instr); + HConstant* filler_map = HConstant::CreateAndInsertAfter( + zone, Unique<Map>::CreateImmovable( + isolate()->factory()->free_space_map()), free_space_instr); HInstruction* store_map = HStoreNamedField::New(zone, context(), free_space_instr, HObjectAccess::ForMap(), filler_map); store_map->SetFlag(HValue::kHasNoObservableSideEffects); @@ -4066,7 +4110,7 @@ HInstruction* HStringCharFromCode::New( if (std::isfinite(c_code->DoubleValue())) { uint32_t code = c_code->NumberValueAsInteger32() & 0xffff; return HConstant::New(zone, context, - LookupSingleCharacterStringFromCode(isolate, code)); + isolate->factory()->LookupSingleCharacterStringFromCode(code)); } return HConstant::New(zone, context, isolate->factory()->empty_string()); } @@ -4140,6 +4184,43 @@ HInstruction* HUnaryMathOperation::New( } +Representation HUnaryMathOperation::RepresentationFromUses() { + if (op_ != kMathFloor && op_ != kMathRound) { + return HValue::RepresentationFromUses(); + } + + // The instruction can have an int32 or double output. Prefer a double + // representation if there are double uses. + bool use_double = false; + + for (HUseIterator it(uses()); !it.Done(); it.Advance()) { + HValue* use = it.value(); + int use_index = it.index(); + Representation rep_observed = use->observed_input_representation(use_index); + Representation rep_required = use->RequiredInputRepresentation(use_index); + use_double |= (rep_observed.IsDouble() || rep_required.IsDouble()); + if (use_double && !FLAG_trace_representation) { + // Having seen one double is enough. + break; + } + if (FLAG_trace_representation) { + if (!rep_required.IsDouble() || rep_observed.IsDouble()) { + PrintF("#%d %s is used by #%d %s as %s%s\n", + id(), Mnemonic(), use->id(), + use->Mnemonic(), rep_observed.Mnemonic(), + (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : "")); + } else { + PrintF("#%d %s is required by #%d %s as %s%s\n", + id(), Mnemonic(), use->id(), + use->Mnemonic(), rep_required.Mnemonic(), + (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : "")); + } + } + } + return use_double ? Representation::Double() : Representation::Integer32(); +} + + HInstruction* HPower::New(Zone* zone, HValue* context, HValue* left, @@ -4455,7 +4536,7 @@ void HPhi::Verify() { void HSimulate::Verify() { HInstruction::Verify(); - ASSERT(HasAstId()); + ASSERT(HasAstId() || next()->IsEnterInlined()); } diff --git a/deps/v8/src/hydrogen-instructions.h b/deps/v8/src/hydrogen-instructions.h index 1e6ac19bf..1cdca4c46 100644 --- a/deps/v8/src/hydrogen-instructions.h +++ b/deps/v8/src/hydrogen-instructions.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_INSTRUCTIONS_H_ #define V8_HYDROGEN_INSTRUCTIONS_H_ @@ -32,13 +9,13 @@ #include "allocation.h" #include "code-stubs.h" +#include "conversions.h" #include "data-flow.h" #include "deoptimizer.h" #include "small-pointer-list.h" #include "string-stream.h" #include "unique.h" -#include "v8conversions.h" -#include "v8utils.h" +#include "utils.h" #include "zone.h" namespace v8 { @@ -184,7 +161,6 @@ class LChunkBuilder; V(WrapReceiver) #define GVN_TRACKED_FLAG_LIST(V) \ - V(Maps) \ V(NewSpacePromotion) #define GVN_UNTRACKED_FLAG_LIST(V) \ @@ -200,6 +176,7 @@ class LChunkBuilder; V(ElementsPointer) \ V(GlobalVars) \ V(InobjectFields) \ + V(Maps) \ V(OsrEntries) \ V(ExternalMemory) \ V(StringChars) \ @@ -678,12 +655,19 @@ class HValue : public ZoneObject { HYDROGEN_ABSTRACT_INSTRUCTION_LIST(DECLARE_PREDICATE) #undef DECLARE_PREDICATE + bool IsBitwiseBinaryShift() { + return IsShl() || IsShr() || IsSar(); + } + HValue(HType type = HType::Tagged()) : block_(NULL), id_(kNoNumber), type_(type), use_list_(NULL), range_(NULL), +#ifdef DEBUG + range_poisoned_(false), +#endif flags_(0) {} virtual ~HValue() {} @@ -854,9 +838,17 @@ class HValue : public ZoneObject { return result; } - Range* range() const { return range_; } - // TODO(svenpanne) We should really use the null object pattern here. 
- bool HasRange() const { return range_ != NULL; } + Range* range() const { + ASSERT(!range_poisoned_); + return range_; + } + bool HasRange() const { + ASSERT(!range_poisoned_); + return range_ != NULL; + } +#ifdef DEBUG + void PoisonRange() { range_poisoned_ = true; } +#endif void AddNewRange(Range* r, Zone* zone); void RemoveLastAddedRange(); void ComputeInitialRange(Zone* zone); @@ -888,7 +880,6 @@ class HValue : public ZoneObject { virtual void PrintTo(StringStream* stream) = 0; void PrintNameTo(StringStream* stream); void PrintTypeTo(StringStream* stream); - void PrintRangeTo(StringStream* stream); void PrintChangesTo(StringStream* stream); const char* Mnemonic() const; @@ -962,7 +953,7 @@ class HValue : public ZoneObject { virtual Representation RepresentationFromInputs() { return representation(); } - Representation RepresentationFromUses(); + virtual Representation RepresentationFromUses(); Representation RepresentationFromUseRequirements(); bool HasNonSmiUse(); virtual void UpdateRepresentation(Representation new_rep, @@ -1028,6 +1019,9 @@ class HValue : public ZoneObject { HType type_; HUseListNode* use_list_; Range* range_; +#ifdef DEBUG + bool range_poisoned_; +#endif int flags_; GVNFlagSet changes_flags_; GVNFlagSet depends_on_flags_; @@ -1263,6 +1257,7 @@ class HInstruction : public HValue { position_.set_operand_position(index, pos); } + bool CanTruncateToSmi() const { return CheckFlag(kTruncatingToSmi); } bool CanTruncateToInt32() const { return CheckFlag(kTruncatingToInt32); } virtual LInstruction* CompileToLithium(LChunkBuilder* builder) = 0; @@ -1529,6 +1524,8 @@ class HBranch V8_FINAL : public HUnaryControlInstruction { virtual bool KnownSuccessorBlock(HBasicBlock** block) V8_OVERRIDE; + virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE; + ToBooleanStub::Types expected_input_types() const { return expected_input_types_; } @@ -1733,7 +1730,7 @@ class HChange V8_FINAL : public HUnaryOperation { set_representation(to); SetFlag(kUseGVN); SetFlag(kCanOverflow); - if (is_truncating_to_smi) { + if (is_truncating_to_smi && to.IsSmi()) { SetFlag(kTruncatingToSmi); SetFlag(kTruncatingToInt32); } @@ -2069,14 +2066,15 @@ class HEnterInlined V8_FINAL : public HTemplateInstruction<0> { public: static HEnterInlined* New(Zone* zone, HValue* context, + BailoutId return_id, Handle<JSFunction> closure, int arguments_count, FunctionLiteral* function, InliningKind inlining_kind, Variable* arguments_var, HArgumentsObject* arguments_object) { - return new(zone) HEnterInlined(closure, arguments_count, function, - inlining_kind, arguments_var, + return new(zone) HEnterInlined(return_id, closure, arguments_count, + function, inlining_kind, arguments_var, arguments_object, zone); } @@ -2091,6 +2089,7 @@ class HEnterInlined V8_FINAL : public HTemplateInstruction<0> { void set_arguments_pushed() { arguments_pushed_ = true; } FunctionLiteral* function() const { return function_; } InliningKind inlining_kind() const { return inlining_kind_; } + BailoutId ReturnId() const { return return_id_; } virtual Representation RequiredInputRepresentation(int index) V8_OVERRIDE { return Representation::None(); @@ -2102,14 +2101,16 @@ class HEnterInlined V8_FINAL : public HTemplateInstruction<0> { DECLARE_CONCRETE_INSTRUCTION(EnterInlined) private: - HEnterInlined(Handle<JSFunction> closure, + HEnterInlined(BailoutId return_id, + Handle<JSFunction> closure, int arguments_count, FunctionLiteral* function, InliningKind inlining_kind, Variable* arguments_var, HArgumentsObject* arguments_object, Zone* zone) 
- : closure_(closure), + : return_id_(return_id), + closure_(closure), arguments_count_(arguments_count), arguments_pushed_(false), function_(function), @@ -2119,6 +2120,7 @@ class HEnterInlined V8_FINAL : public HTemplateInstruction<0> { return_targets_(2, zone) { } + BailoutId return_id_; Handle<JSFunction> closure_; int arguments_count_; bool arguments_pushed_; @@ -2640,6 +2642,7 @@ class HUnaryMathOperation V8_FINAL : public HTemplateInstruction<2> { virtual Range* InferRange(Zone* zone) V8_OVERRIDE; virtual HValue* Canonicalize() V8_OVERRIDE; + virtual Representation RepresentationFromUses() V8_OVERRIDE; virtual Representation RepresentationFromInputs() V8_OVERRIDE; BuiltinFunctionId op() const { return op_; } @@ -2654,6 +2657,15 @@ class HUnaryMathOperation V8_FINAL : public HTemplateInstruction<2> { } private: + // Indicates if we support a double (and int32) output for Math.floor and + // Math.round. + bool SupportsFlexibleFloorAndRound() const { +#ifdef V8_TARGET_ARCH_ARM64 + return true; +#else + return false; +#endif + } HUnaryMathOperation(HValue* context, HValue* value, BuiltinFunctionId op) : HTemplateInstruction<2>(HType::TaggedNumber()), op_(op) { SetOperandAt(0, context); @@ -2661,6 +2673,12 @@ class HUnaryMathOperation V8_FINAL : public HTemplateInstruction<2> { switch (op) { case kMathFloor: case kMathRound: + if (SupportsFlexibleFloorAndRound()) { + SetFlag(kFlexibleRepresentation); + } else { + set_representation(Representation::Integer32()); + } + break; case kMathClz32: set_representation(Representation::Integer32()); break; @@ -2730,81 +2748,112 @@ class HLoadRoot V8_FINAL : public HTemplateInstruction<0> { class HCheckMaps V8_FINAL : public HTemplateInstruction<2> { public: static HCheckMaps* New(Zone* zone, HValue* context, HValue* value, - Handle<Map> map, CompilationInfo* info, - HValue* typecheck = NULL); + Handle<Map> map, HValue* typecheck = NULL) { + return new(zone) HCheckMaps(value, new(zone) UniqueSet<Map>( + Unique<Map>::CreateImmovable(map), zone), typecheck); + } static HCheckMaps* New(Zone* zone, HValue* context, - HValue* value, SmallMapList* maps, + HValue* value, SmallMapList* map_list, HValue* typecheck = NULL) { - HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck); - for (int i = 0; i < maps->length(); i++) { - check_map->Add(maps->at(i), zone); + UniqueSet<Map>* maps = new(zone) UniqueSet<Map>(map_list->length(), zone); + for (int i = 0; i < map_list->length(); ++i) { + maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone); } - return check_map; + return new(zone) HCheckMaps(value, maps, typecheck); } - bool CanOmitMapChecks() { return omit_; } + bool IsStabilityCheck() const { return is_stability_check_; } + void MarkAsStabilityCheck() { + has_migration_target_ = false; + is_stability_check_ = true; + ClearChangesFlag(kNewSpacePromotion); + ClearDependsOnFlag(kElementsKind); + ClearDependsOnFlag(kMaps); + } virtual bool HasEscapingOperandAt(int index) V8_OVERRIDE { return false; } virtual Representation RequiredInputRepresentation(int index) V8_OVERRIDE { return Representation::Tagged(); } - virtual bool HandleSideEffectDominator(GVNFlag side_effect, - HValue* dominator) V8_OVERRIDE; virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE; HValue* value() { return OperandAt(0); } HValue* typecheck() { return OperandAt(1); } - Unique<Map> first_map() const { return map_set_.at(0); } - UniqueSet<Map> map_set() const { return map_set_; } + const UniqueSet<Map>* maps() const { return maps_; } + void set_maps(const 
UniqueSet<Map>* maps) { maps_ = maps; } - void set_map_set(UniqueSet<Map>* maps, Zone *zone) { - map_set_.Clear(); - for (int i = 0; i < maps->size(); i++) { - map_set_.Add(maps->at(i), zone); - } + bool maps_are_stable() const { return maps_are_stable_; } + + bool HasMigrationTarget() const { return has_migration_target_; } + + virtual HValue* Canonicalize() V8_OVERRIDE; + + static HCheckMaps* CreateAndInsertAfter(Zone* zone, + HValue* value, + Unique<Map> map, + bool map_is_stable, + HInstruction* instr) { + return CreateAndInsertAfter(zone, value, new(zone) UniqueSet<Map>( + map, zone), map_is_stable, instr); } - bool has_migration_target() const { - return has_migration_target_; + static HCheckMaps* CreateAndInsertAfter(Zone* zone, + HValue* value, + const UniqueSet<Map>* maps, + bool maps_are_stable, + HInstruction* instr) { + return instr->Append(new(zone) HCheckMaps(value, maps, maps_are_stable)); } DECLARE_CONCRETE_INSTRUCTION(CheckMaps) protected: virtual bool DataEquals(HValue* other) V8_OVERRIDE { - return this->map_set_.Equals(&HCheckMaps::cast(other)->map_set_); + return this->maps()->Equals(HCheckMaps::cast(other)->maps()); } virtual int RedefinedOperandIndex() { return 0; } private: - void Add(Handle<Map> map, Zone* zone) { - map_set_.Add(Unique<Map>(map), zone); + HCheckMaps(HValue* value, const UniqueSet<Map>* maps, bool maps_are_stable) + : HTemplateInstruction<2>(value->type()), maps_(maps), + has_migration_target_(false), is_stability_check_(false), + maps_are_stable_(maps_are_stable) { + ASSERT_NE(0, maps->size()); + SetOperandAt(0, value); + // Use the object value for the dependency. + SetOperandAt(1, value); + set_representation(Representation::Tagged()); + SetFlag(kUseGVN); SetDependsOnFlag(kMaps); SetDependsOnFlag(kElementsKind); - - if (!has_migration_target_ && map->is_migration_target()) { - has_migration_target_ = true; - SetChangesFlag(kNewSpacePromotion); - } } - // Clients should use one of the static New* methods above. - HCheckMaps(HValue* value, Zone *zone, HValue* typecheck) - : HTemplateInstruction<2>(value->type()), - omit_(false), has_migration_target_(false) { + HCheckMaps(HValue* value, const UniqueSet<Map>* maps, HValue* typecheck) + : HTemplateInstruction<2>(value->type()), maps_(maps), + has_migration_target_(false), is_stability_check_(false), + maps_are_stable_(true) { + ASSERT_NE(0, maps->size()); SetOperandAt(0, value); // Use the object value for the dependency if NULL is passed. - SetOperandAt(1, typecheck != NULL ? typecheck : value); + SetOperandAt(1, typecheck ? 
typecheck : value); set_representation(Representation::Tagged()); SetFlag(kUseGVN); - SetFlag(kTrackSideEffectDominators); + SetDependsOnFlag(kMaps); + SetDependsOnFlag(kElementsKind); + for (int i = 0; i < maps->size(); ++i) { + Handle<Map> map = maps->at(i).handle(); + if (map->is_migration_target()) has_migration_target_ = true; + if (!map->is_stable()) maps_are_stable_ = false; + } + if (has_migration_target_) SetChangesFlag(kNewSpacePromotion); } - bool omit_; - bool has_migration_target_; - UniqueSet<Map> map_set_; + const UniqueSet<Map>* maps_; + bool has_migration_target_ : 1; + bool is_stability_check_ : 1; + bool maps_are_stable_ : 1; }; @@ -3377,8 +3426,6 @@ class HArgumentsObject V8_FINAL : public HDematerializedObject { set_representation(Representation::Tagged()); SetFlag(kIsArguments); } - - virtual bool IsDeletable() const V8_FINAL V8_OVERRIDE { return true; } }; @@ -3450,12 +3497,22 @@ class HConstant V8_FINAL : public HTemplateInstruction<0> { } static HConstant* CreateAndInsertBefore(Zone* zone, - Unique<Object> unique, + Unique<Object> object, bool is_not_in_new_space, HInstruction* instruction) { return instruction->Prepend(new(zone) HConstant( - unique, Representation::Tagged(), HType::Tagged(), - is_not_in_new_space, false, false, kUnknownInstanceType)); + object, Unique<Map>(Handle<Map>::null()), false, + Representation::Tagged(), HType::Tagged(), is_not_in_new_space, + false, false, kUnknownInstanceType)); + } + + static HConstant* CreateAndInsertAfter(Zone* zone, + Unique<Map> map, + HInstruction* instruction) { + return instruction->Append(new(zone) HConstant( + map, Unique<Map>(Handle<Map>::null()), false, + Representation::Tagged(), HType::Tagged(), true, + false, false, MAP_TYPE)); } Handle<Object> handle(Isolate* isolate) { @@ -3470,12 +3527,6 @@ class HConstant V8_FINAL : public HTemplateInstruction<0> { return object_.handle(); } - bool HasMap(Handle<Map> map) { - Handle<Object> constant_object = handle(map->GetIsolate()); - return constant_object->IsHeapObject() && - Handle<HeapObject>::cast(constant_object)->map() == *map; - } - bool IsSpecialDouble() const { return has_double_value_ && (BitCast<int64_t>(double_value_) == BitCast<int64_t>(-0.0) || @@ -3560,6 +3611,22 @@ class HConstant V8_FINAL : public HTemplateInstruction<0> { bool IsUndetectable() const { return is_undetectable_; } InstanceType GetInstanceType() const { return instance_type_; } + bool HasMapValue() const { return instance_type_ == MAP_TYPE; } + Unique<Map> MapValue() const { + ASSERT(HasMapValue()); + return Unique<Map>::cast(GetUnique()); + } + bool HasStableMapValue() const { + ASSERT(HasMapValue() || !has_stable_map_value_); + return has_stable_map_value_; + } + + bool HasObjectMap() const { return !object_map_.IsNull(); } + Unique<Map> ObjectMap() const { + ASSERT(HasObjectMap()); + return object_map_; + } + virtual intptr_t Hashcode() V8_OVERRIDE { if (has_int32_value_) { return static_cast<intptr_t>(int32_value_); @@ -3632,7 +3699,9 @@ class HConstant V8_FINAL : public HTemplateInstruction<0> { Representation r = Representation::None(), bool is_not_in_new_space = true, Unique<Object> optional = Unique<Object>(Handle<Object>::null())); - HConstant(Unique<Object> unique, + HConstant(Unique<Object> object, + Unique<Map> object_map, + bool has_stable_map_value, Representation r, HType type, bool is_not_in_new_space, @@ -3652,6 +3721,12 @@ class HConstant V8_FINAL : public HTemplateInstruction<0> { // constant HeapObject. 
Unique<Object> object_; + // If object_ is a heap object, this points to the stable map of the object. + Unique<Map> object_map_; + + // If object_ is a map, this indicates whether the map is stable. + bool has_stable_map_value_ : 1; + // We store the HConstant in the most specific form safely possible. // The two flags, has_int32_value_ and has_double_value_ tell us if // int32_value_ and double_value_ hold valid, safe representations @@ -3761,7 +3836,7 @@ class HBinaryOperation : public HTemplateInstruction<3> { bool RightIsPowerOf2() { if (!right()->IsInteger32Constant()) return false; int32_t value = right()->GetInteger32Constant(); - return value != 0 && (IsPowerOf2(value) || IsPowerOf2(-value)); + return IsPowerOf2(value) || IsPowerOf2(-value); } DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation) @@ -4206,6 +4281,9 @@ class HCompareNumericAndBranch : public HTemplateControlInstruction<2, 2> { virtual Representation observed_input_representation(int index) V8_OVERRIDE { return observed_input_representation_[index]; } + + virtual bool KnownSuccessorBlock(HBasicBlock** block) V8_OVERRIDE; + virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE; void SetOperandPositions(Zone* zone, @@ -5818,6 +5896,10 @@ class HObjectAccess V8_FINAL { return portion() == kStringLengths; } + inline bool IsMap() const { + return portion() == kMaps; + } + inline int offset() const { return OffsetField::decode(value_); } @@ -6025,6 +6107,11 @@ class HObjectAccess V8_FINAL { JSArrayBuffer::kBackingStoreOffset, Representation::External()); } + static HObjectAccess ForJSArrayBufferByteLength() { + return HObjectAccess::ForObservableJSObjectOffset( + JSArrayBuffer::kByteLengthOffset, Representation::Tagged()); + } + static HObjectAccess ForExternalArrayExternalPointer() { return HObjectAccess::ForObservableJSObjectOffset( ExternalArray::kExternalPointerOffset, Representation::External()); @@ -6125,8 +6212,10 @@ class HObjectAccess V8_FINAL { class HLoadNamedField V8_FINAL : public HTemplateInstruction<2> { public: - DECLARE_INSTRUCTION_FACTORY_P3(HLoadNamedField, HValue*, HValue*, - HObjectAccess); + DECLARE_INSTRUCTION_FACTORY_P3(HLoadNamedField, HValue*, + HValue*, HObjectAccess); + DECLARE_INSTRUCTION_FACTORY_P5(HLoadNamedField, HValue*, HValue*, + HObjectAccess, const UniqueSet<Map>*, HType); HValue* object() { return OperandAt(0); } HValue* dependency() { @@ -6139,6 +6228,8 @@ class HLoadNamedField V8_FINAL : public HTemplateInstruction<2> { return access_.representation(); } + const UniqueSet<Map>* maps() const { return maps_; } + virtual bool HasEscapingOperandAt(int index) V8_OVERRIDE { return false; } virtual bool HasOutOfBoundsAccess(int size) V8_OVERRIDE { return !access().IsInobject() || access().offset() >= size; @@ -6153,21 +6244,36 @@ class HLoadNamedField V8_FINAL : public HTemplateInstruction<2> { virtual Range* InferRange(Zone* zone) V8_OVERRIDE; virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE; + bool CanBeReplacedWith(HValue* other) const { + if (!type().Equals(other->type())) return false; + if (!representation().Equals(other->representation())) return false; + if (!other->IsLoadNamedField()) return true; + HLoadNamedField* that = HLoadNamedField::cast(other); + if (this->maps_ == that->maps_) return true; + if (this->maps_ == NULL || that->maps_ == NULL) return false; + return this->maps_->IsSubset(that->maps_); + } + DECLARE_CONCRETE_INSTRUCTION(LoadNamedField) protected: virtual bool DataEquals(HValue* other) V8_OVERRIDE { - HLoadNamedField* b = HLoadNamedField::cast(other); - 
return access_.Equals(b->access_); + HLoadNamedField* that = HLoadNamedField::cast(other); + if (!this->access_.Equals(that->access_)) return false; + if (this->maps_ == that->maps_) return true; + return (this->maps_ != NULL && + that->maps_ != NULL && + this->maps_->Equals(that->maps_)); } private: HLoadNamedField(HValue* object, HValue* dependency, - HObjectAccess access) : access_(access) { - ASSERT(object != NULL); + HObjectAccess access) + : access_(access), maps_(NULL) { + ASSERT_NOT_NULL(object); SetOperandAt(0, object); - SetOperandAt(1, dependency != NULL ? dependency : object); + SetOperandAt(1, dependency ? dependency : object); Representation representation = access.representation(); if (representation.IsInteger8() || @@ -6187,6 +6293,8 @@ class HLoadNamedField V8_FINAL : public HTemplateInstruction<2> { representation.IsInteger32()) { set_representation(representation); } else if (representation.IsHeapObject()) { + // TODO(bmeurer): This is probably broken. What we actually want to to + // instead is set_representation(Representation::HeapObject()). set_type(HType::NonPrimitive()); set_representation(Representation::Tagged()); } else { @@ -6195,9 +6303,32 @@ class HLoadNamedField V8_FINAL : public HTemplateInstruction<2> { access.SetGVNFlags(this, LOAD); } + HLoadNamedField(HValue* object, + HValue* dependency, + HObjectAccess access, + const UniqueSet<Map>* maps, + HType type) + : HTemplateInstruction<2>(type), access_(access), maps_(maps) { + ASSERT_NOT_NULL(maps); + ASSERT_NE(0, maps->size()); + + ASSERT_NOT_NULL(object); + SetOperandAt(0, object); + SetOperandAt(1, dependency ? dependency : object); + + ASSERT(access.representation().IsHeapObject()); + // TODO(bmeurer): This is probably broken. What we actually want to to + // instead is set_representation(Representation::HeapObject()). + if (!type.IsHeapObject()) set_type(HType::NonPrimitive()); + set_representation(Representation::Tagged()); + + access.SetGVNFlags(this, LOAD); + } + virtual bool IsDeletable() const V8_OVERRIDE { return true; } HObjectAccess access_; + const UniqueSet<Map>* maps_; }; @@ -6573,13 +6704,9 @@ class HStoreNamedField V8_FINAL : public HTemplateInstruction<3> { } } - void SetTransition(HConstant* map_constant, CompilationInfo* info) { + void SetTransition(HConstant* transition) { ASSERT(!has_transition()); // Only set once. - Handle<Map> map = Handle<Map>::cast(map_constant->handle(info->isolate())); - if (map->CanBeDeprecated()) { - map->AddDependentCompilationInfo(DependentCode::kTransitionGroup, info); - } - SetOperandAt(2, map_constant); + SetOperandAt(2, transition); has_transition_ = true; } @@ -7399,6 +7526,8 @@ class HCheckMapValue V8_FINAL : public HTemplateInstruction<2> { HValue* value() { return OperandAt(0); } HValue* map() { return OperandAt(1); } + virtual HValue* Canonicalize() V8_OVERRIDE; + DECLARE_CONCRETE_INSTRUCTION(CheckMapValue) protected: @@ -7499,6 +7628,7 @@ class HLoadFieldByIndex V8_FINAL : public HTemplateInstruction<2> { HValue* index) { SetOperandAt(0, object); SetOperandAt(1, index); + SetChangesFlag(kNewSpacePromotion); set_representation(Representation::Tagged()); } diff --git a/deps/v8/src/hydrogen-load-elimination.cc b/deps/v8/src/hydrogen-load-elimination.cc index f84eac046..1198d2b7a 100644 --- a/deps/v8/src/hydrogen-load-elimination.cc +++ b/deps/v8/src/hydrogen-load-elimination.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-alias-analysis.h" #include "hydrogen-load-elimination.h" @@ -76,9 +53,7 @@ class HLoadEliminationTable : public ZoneObject { FieldOf(l->access()), l->object()->ActualValue()->id())); HValue* result = load(l); - if (result != instr && - result->type().Equals(instr->type()) && - result->representation().Equals(instr->representation())) { + if (result != instr && l->CanBeReplacedWith(result)) { // The load can be replaced with a previous load or a value. TRACE((" replace L%d -> v%d\n", instr->id(), result->id())); instr->DeleteAndReplaceWith(result); diff --git a/deps/v8/src/hydrogen-load-elimination.h b/deps/v8/src/hydrogen-load-elimination.h index ef6f71fa1..98cd03d58 100644 --- a/deps/v8/src/hydrogen-load-elimination.h +++ b/deps/v8/src/hydrogen-load-elimination.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_LOAD_ELIMINATION_H_ #define V8_HYDROGEN_LOAD_ELIMINATION_H_ diff --git a/deps/v8/src/hydrogen-mark-deoptimize.cc b/deps/v8/src/hydrogen-mark-deoptimize.cc index c0236e91c..338e239e2 100644 --- a/deps/v8/src/hydrogen-mark-deoptimize.cc +++ b/deps/v8/src/hydrogen-mark-deoptimize.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-mark-deoptimize.h" diff --git a/deps/v8/src/hydrogen-mark-deoptimize.h b/deps/v8/src/hydrogen-mark-deoptimize.h index 30f35b3de..7b302fcc2 100644 --- a/deps/v8/src/hydrogen-mark-deoptimize.h +++ b/deps/v8/src/hydrogen-mark-deoptimize.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_MARK_DEOPTIMIZE_H_ #define V8_HYDROGEN_MARK_DEOPTIMIZE_H_ diff --git a/deps/v8/src/hydrogen-mark-unreachable.cc b/deps/v8/src/hydrogen-mark-unreachable.cc index d7c5ed2b1..c80de639e 100644 --- a/deps/v8/src/hydrogen-mark-unreachable.cc +++ b/deps/v8/src/hydrogen-mark-unreachable.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-mark-unreachable.h" diff --git a/deps/v8/src/hydrogen-mark-unreachable.h b/deps/v8/src/hydrogen-mark-unreachable.h index 9ecc6e9f1..a406c5cff 100644 --- a/deps/v8/src/hydrogen-mark-unreachable.h +++ b/deps/v8/src/hydrogen-mark-unreachable.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_MARK_UNREACHABLE_H_ #define V8_HYDROGEN_MARK_UNREACHABLE_H_ diff --git a/deps/v8/src/hydrogen-osr.cc b/deps/v8/src/hydrogen-osr.cc index 6e39df6aa..ff46ea7eb 100644 --- a/deps/v8/src/hydrogen-osr.cc +++ b/deps/v8/src/hydrogen-osr.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen.h" #include "hydrogen-osr.h" diff --git a/deps/v8/src/hydrogen-osr.h b/deps/v8/src/hydrogen-osr.h index ae72ce650..00bb2d4e1 100644 --- a/deps/v8/src/hydrogen-osr.h +++ b/deps/v8/src/hydrogen-osr.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_OSR_H_ #define V8_HYDROGEN_OSR_H_ diff --git a/deps/v8/src/hydrogen-range-analysis.cc b/deps/v8/src/hydrogen-range-analysis.cc index 9d58fc89f..609dd88fa 100644 --- a/deps/v8/src/hydrogen-range-analysis.cc +++ b/deps/v8/src/hydrogen-range-analysis.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-range-analysis.h" @@ -123,6 +100,22 @@ void HRangeAnalysisPhase::Run() { block = NULL; } } + + // The ranges are not valid anymore due to SSI vs. SSA! + PoisonRanges(); +} + + +void HRangeAnalysisPhase::PoisonRanges() { +#ifdef DEBUG + for (int i = 0; i < graph()->blocks()->length(); ++i) { + HBasicBlock* block = graph()->blocks()->at(i); + for (HInstructionIterator it(block); !it.Done(); it.Advance()) { + HInstruction* instr = it.Current(); + if (instr->HasRange()) instr->PoisonRange(); + } + } +#endif } diff --git a/deps/v8/src/hydrogen-range-analysis.h b/deps/v8/src/hydrogen-range-analysis.h index e0cc3c5da..2ed2ecb22 100644 --- a/deps/v8/src/hydrogen-range-analysis.h +++ b/deps/v8/src/hydrogen-range-analysis.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
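
[Editor's note] The hunk above adds a DEBUG-only HRangeAnalysisPhase::PoisonRanges() pass that walks every block and invalidates each instruction's computed range once the phase finishes, so stale ranges cannot be consumed later ("The ranges are not valid anymore due to SSI vs. SSA"). Below is a minimal, standalone C++ sketch of that poison-after-use pattern; the types and names are invented for illustration and are not V8's.

#include <vector>

struct Range { int lower; int upper; bool valid; };
struct Node  { Range range; };

void PoisonRanges(std::vector<Node>* graph) {
#ifndef NDEBUG  // analogous to the #ifdef DEBUG guard in the hunk above
  // Deliberately invalidate every cached range so any later consumer that
  // still reads one can fail fast in debug builds.
  for (Node& node : *graph) node.range.valid = false;
#endif
}

int main() {
  std::vector<Node> graph(3, Node{{0, 10, true}});
  PoisonRanges(&graph);
  // Downstream code can assert(node.range.valid) before using a range.
  return 0;
}
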
#ifndef V8_HYDROGEN_RANGE_ANALYSIS_H_ #define V8_HYDROGEN_RANGE_ANALYSIS_H_ @@ -57,6 +34,7 @@ class HRangeAnalysisPhase : public HPhase { worklist_.Add(value, zone()); } void PropagateMinusZeroChecks(HValue* value); + void PoisonRanges(); ZoneList<HValue*> changed_ranges_; diff --git a/deps/v8/src/hydrogen-redundant-phi.cc b/deps/v8/src/hydrogen-redundant-phi.cc index 1263833da..5757cfb0a 100644 --- a/deps/v8/src/hydrogen-redundant-phi.cc +++ b/deps/v8/src/hydrogen-redundant-phi.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-redundant-phi.h" diff --git a/deps/v8/src/hydrogen-redundant-phi.h b/deps/v8/src/hydrogen-redundant-phi.h index 960ae69c9..9e1092b39 100644 --- a/deps/v8/src/hydrogen-redundant-phi.h +++ b/deps/v8/src/hydrogen-redundant-phi.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_REDUNDANT_PHI_H_ #define V8_HYDROGEN_REDUNDANT_PHI_H_ diff --git a/deps/v8/src/hydrogen-removable-simulates.cc b/deps/v8/src/hydrogen-removable-simulates.cc index f95283243..7bbe3cbf2 100644 --- a/deps/v8/src/hydrogen-removable-simulates.cc +++ b/deps/v8/src/hydrogen-removable-simulates.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-removable-simulates.h" @@ -41,14 +18,17 @@ void HMergeRemovableSimulatesPhase::Run() { bool first = true; for (HInstructionIterator it(block); !it.Done(); it.Advance()) { HInstruction* current = it.Current(); + if (current->IsEnterInlined()) { + // Ensure there's a non-foldable HSimulate before an HEnterInlined to + // avoid folding across HEnterInlined. + ASSERT(!HSimulate::cast(current->previous())-> + is_candidate_for_removal()); + } if (current->IsLeaveInlined()) { - // Never fold simulates from inlined environments into simulates - // in the outer environment. - // (Before each HEnterInlined, there is a non-foldable HSimulate - // anyway, so we get the barrier in the other direction for free.) - // Simply remove all accumulated simulates without merging. 
This - // is safe because simulates after instructions with side effects - // are never added to the merge list. + // Never fold simulates from inlined environments into simulates in the + // outer environment. Simply remove all accumulated simulates without + // merging. This is safe because simulates after instructions with side + // effects are never added to the merge list. while (!mergelist.is_empty()) { mergelist.RemoveLast()->DeleteAndReplaceWith(NULL); } @@ -70,13 +50,24 @@ void HMergeRemovableSimulatesPhase::Run() { continue; } HSimulate* current_simulate = HSimulate::cast(current); - if ((current_simulate->previous()->HasObservableSideEffects() && - !current_simulate->next()->IsSimulate()) || - !current_simulate->is_candidate_for_removal()) { - // This simulate is not suitable for folding. - // Fold the ones accumulated so far. + if (!current_simulate->is_candidate_for_removal()) { + current_simulate->MergeWith(&mergelist); + } else if (current_simulate->ast_id().IsNone()) { + ASSERT(current_simulate->next()->IsEnterInlined()); + if (!mergelist.is_empty()) { + HSimulate* last = mergelist.RemoveLast(); + last->MergeWith(&mergelist); + } + } else if (current_simulate->previous()->HasObservableSideEffects()) { + while (current_simulate->next()->IsSimulate()) { + it.Advance(); + HSimulate* next_simulate = HSimulate::cast(it.Current()); + if (next_simulate->ast_id().IsNone()) break; + mergelist.Add(current_simulate, zone()); + current_simulate = next_simulate; + if (!current_simulate->is_candidate_for_removal()) break; + } current_simulate->MergeWith(&mergelist); - continue; } else { // Accumulate this simulate for folding later on. mergelist.Add(current_simulate, zone()); diff --git a/deps/v8/src/hydrogen-removable-simulates.h b/deps/v8/src/hydrogen-removable-simulates.h index f5bcd6ddf..aec5fce1d 100644 --- a/deps/v8/src/hydrogen-removable-simulates.h +++ b/deps/v8/src/hydrogen-removable-simulates.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_REMOVABLE_SIMULATES_H_ #define V8_HYDROGEN_REMOVABLE_SIMULATES_H_ diff --git a/deps/v8/src/hydrogen-representation-changes.cc b/deps/v8/src/hydrogen-representation-changes.cc index 0b87d12eb..15f1a6e44 100644 --- a/deps/v8/src/hydrogen-representation-changes.cc +++ b/deps/v8/src/hydrogen-representation-changes.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-representation-changes.h" diff --git a/deps/v8/src/hydrogen-representation-changes.h b/deps/v8/src/hydrogen-representation-changes.h index 77e899b60..ff57e1929 100644 --- a/deps/v8/src/hydrogen-representation-changes.h +++ b/deps/v8/src/hydrogen-representation-changes.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_REPRESENTATION_CHANGES_H_ #define V8_HYDROGEN_REPRESENTATION_CHANGES_H_ diff --git a/deps/v8/src/hydrogen-sce.cc b/deps/v8/src/hydrogen-sce.cc index 70b0a0c08..dfb3a7e3a 100644 --- a/deps/v8/src/hydrogen-sce.cc +++ b/deps/v8/src/hydrogen-sce.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-sce.h" #include "v8.h" diff --git a/deps/v8/src/hydrogen-sce.h b/deps/v8/src/hydrogen-sce.h index 55e153e0e..48ac8e331 100644 --- a/deps/v8/src/hydrogen-sce.h +++ b/deps/v8/src/hydrogen-sce.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_SCE_H_ #define V8_HYDROGEN_SCE_H_ diff --git a/deps/v8/src/hydrogen-store-elimination.cc b/deps/v8/src/hydrogen-store-elimination.cc index 2e6ee5138..cf5f3a15e 100644 --- a/deps/v8/src/hydrogen-store-elimination.cc +++ b/deps/v8/src/hydrogen-store-elimination.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "hydrogen-store-elimination.h" #include "hydrogen-instructions.h" @@ -129,7 +106,7 @@ void HStoreEliminationPhase::ProcessInstr(HInstruction* instr, unobserved_.Rewind(0); return; } - if (instr->ChangesFlags().ContainsAnyOf(flags)) { + if (instr->DependsOnFlags().ContainsAnyOf(flags)) { TRACE(("-- Observed stores at I%d (GVN flags)\n", instr->id())); unobserved_.Rewind(0); return; diff --git a/deps/v8/src/hydrogen-store-elimination.h b/deps/v8/src/hydrogen-store-elimination.h index 7dc871c9b..e697708c3 100644 --- a/deps/v8/src/hydrogen-store-elimination.h +++ b/deps/v8/src/hydrogen-store-elimination.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_STORE_ELIMINATION_H_ #define V8_HYDROGEN_STORE_ELIMINATION_H_ diff --git a/deps/v8/src/hydrogen-uint32-analysis.cc b/deps/v8/src/hydrogen-uint32-analysis.cc index 63b1de697..21fbec9f3 100644 --- a/deps/v8/src/hydrogen-uint32-analysis.cc +++ b/deps/v8/src/hydrogen-uint32-analysis.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen-uint32-analysis.h" diff --git a/deps/v8/src/hydrogen-uint32-analysis.h b/deps/v8/src/hydrogen-uint32-analysis.h index 59739d1cc..8d672ac6a 100644 --- a/deps/v8/src/hydrogen-uint32-analysis.h +++ b/deps/v8/src/hydrogen-uint32-analysis.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_UINT32_ANALYSIS_H_ #define V8_HYDROGEN_UINT32_ANALYSIS_H_ diff --git a/deps/v8/src/hydrogen.cc b/deps/v8/src/hydrogen.cc index a7ef0cbd0..ee9f8e416 100644 --- a/deps/v8/src/hydrogen.cc +++ b/deps/v8/src/hydrogen.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "hydrogen.h" @@ -701,11 +678,13 @@ HConstant* HGraph::GetConstantMinus1() { } -#define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value) \ +#define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value) \ HConstant* HGraph::GetConstant##Name() { \ if (!constant_##name##_.is_set()) { \ HConstant* constant = new(zone()) HConstant( \ Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \ + Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()), \ + false, \ Representation::Tagged(), \ htype, \ true, \ @@ -719,11 +698,11 @@ HConstant* HGraph::GetConstant##Name() { \ } -DEFINE_GET_CONSTANT(Undefined, undefined, HType::Tagged(), false) -DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true) -DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false) -DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false) -DEFINE_GET_CONSTANT(Null, null, HType::Tagged(), false) +DEFINE_GET_CONSTANT(Undefined, undefined, undefined, HType::Tagged(), false) +DEFINE_GET_CONSTANT(True, true, boolean, HType::Boolean(), true) +DEFINE_GET_CONSTANT(False, false, boolean, HType::Boolean(), false) +DEFINE_GET_CONSTANT(Hole, the_hole, the_hole, HType::Tagged(), false) +DEFINE_GET_CONSTANT(Null, null, null, HType::Tagged(), false) #undef DEFINE_GET_CONSTANT @@ -1260,11 +1239,6 @@ void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) { } -HValue* HGraphBuilder::BuildCheckMap(HValue* obj, Handle<Map> map) { - return Add<HCheckMaps>(obj, map, top_info()); -} - - HValue* HGraphBuilder::BuildCheckString(HValue* string) { if (!string->type().IsString()) { ASSERT(!string->IsConstant() || @@ -1314,11 +1288,8 @@ HValue* HGraphBuilder::BuildCheckForCapacityGrow( HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap)); HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap); - IfBuilder key_checker(this); - key_checker.If<HCompareNumericAndBranch>(key, max_capacity, Token::LT); - key_checker.Then(); - key_checker.ElseDeopt("Key out of capacity range"); - key_checker.End(); + + Add<HBoundsCheck>(key, max_capacity); HValue* new_capacity = BuildNewElementsCapacity(key); HValue* new_elements = BuildGrowElementsCapacity(object, elements, @@ 
-1794,7 +1765,8 @@ HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length, HValue* right_length) { // Compute the combined string length and check against max string length. HValue* length = AddUncasted<HAdd>(left_length, right_length); - HValue* max_length = Add<HConstant>(String::kMaxLength); + // Check that length <= kMaxLength <=> length < MaxLength + 1. + HValue* max_length = Add<HConstant>(String::kMaxLength + 1); Add<HBoundsCheck>(length, max_length); return length; } @@ -2167,13 +2139,13 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess( if (access_type == STORE && (fast_elements || fast_smi_only_elements) && store_mode != STORE_NO_TRANSITION_HANDLE_COW) { HCheckMaps* check_cow_map = Add<HCheckMaps>( - elements, isolate()->factory()->fixed_array_map(), top_info()); + elements, isolate()->factory()->fixed_array_map()); check_cow_map->ClearDependsOnFlag(kElementsKind); } HInstruction* length = NULL; if (is_js_array) { length = Add<HLoadNamedField>( - checked_object, static_cast<HValue*>(NULL), + checked_object->ActualValue(), checked_object, HObjectAccess::ForArrayLength(elements_kind)); } else { length = AddLoadFixedArrayLength(elements); @@ -2241,7 +2213,7 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess( elements_kind, length); } else { HCheckMaps* check_cow_map = Add<HCheckMaps>( - elements, isolate()->factory()->fixed_array_map(), top_info()); + elements, isolate()->factory()->fixed_array_map()); check_cow_map->ClearDependsOnFlag(kElementsKind); } } @@ -2620,6 +2592,15 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate, } if (length > 0) { + // We have to initialize the elements pointer if allocation folding is + // turned off. + if (!FLAG_use_gvn || !FLAG_use_allocation_folding) { + HConstant* empty_fixed_array = Add<HConstant>( + isolate()->factory()->empty_fixed_array()); + Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(), + empty_fixed_array, INITIALIZING_STORE); + } + HValue* boilerplate_elements = AddLoadElements(boilerplate); HValue* object_elements; if (IsFastDoubleElementsKind(kind)) { @@ -2700,7 +2681,7 @@ void HGraphBuilder::BuildCompareNil( // the monomorphic map when the code is used as a template to generate a // new IC. For optimized functions, there is no sentinel map, the map // emitted below is the actual monomorphic map. 
- BuildCheckMap(value, type->Classes().Current()); + Add<HCheckMaps>(value, type->Classes().Current()); } else { if_nil.Deopt("Too many undetectable types"); } @@ -3090,8 +3071,7 @@ HGraph::HGraph(CompilationInfo* info) inlined_functions_(5, info->zone()) { if (info->IsStub()) { HydrogenCodeStub* stub = info->code_stub(); - CodeStubInterfaceDescriptor* descriptor = - stub->GetInterfaceDescriptor(isolate_); + CodeStubInterfaceDescriptor* descriptor = stub->GetInterfaceDescriptor(); start_environment_ = new(zone_) HEnvironment(zone_, descriptor->environment_length()); } else { @@ -4864,7 +4844,7 @@ HOptimizedGraphBuilder::GlobalPropertyAccess return kUseGeneric; } Handle<GlobalObject> global(current_info()->global_object()); - global->Lookup(*var->name(), lookup); + global->Lookup(var->name(), lookup); if (!lookup->IsNormal() || (access_type == STORE && lookup->IsReadOnly()) || lookup->holder() != *global) { @@ -4924,11 +4904,11 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) { Handle<GlobalObject> global(current_info()->global_object()); Handle<PropertyCell> cell(global->GetPropertyCell(&lookup)); if (cell->type()->IsConstant()) { - cell->AddDependentCompilationInfo(top_info()); - Handle<Object> constant_object = cell->type()->AsConstant(); + PropertyCell::AddDependentCompilationInfo(cell, top_info()); + Handle<Object> constant_object = cell->type()->AsConstant()->Value(); if (constant_object->IsConsString()) { constant_object = - FlattenGetString(Handle<String>::cast(constant_object)); + String::Flatten(Handle<String>::cast(constant_object)); } HConstant* constant = New<HConstant>(constant_object); return ast_context()->ReturnInstruction(constant, expr->id()); @@ -4998,7 +4978,7 @@ void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) { static bool CanInlinePropertyAccess(Type* type) { if (type->Is(Type::NumberOrString())) return true; if (!type->IsClass()) return false; - Handle<Map> map = type->AsClass(); + Handle<Map> map = type->AsClass()->Map(); return map->IsJSObjectMap() && !map->is_dictionary_map() && !map->has_named_interceptor(); @@ -5011,9 +4991,9 @@ static bool CanInlinePropertyAccess(Type* type) { static bool IsFastLiteral(Handle<JSObject> boilerplate, int max_depth, int* max_properties) { - if (boilerplate->map()->is_deprecated()) { - Handle<Object> result = JSObject::TryMigrateInstance(boilerplate); - if (result.is_null()) return false; + if (boilerplate->map()->is_deprecated() && + !JSObject::TryMigrateInstance(boilerplate)) { + return false; } ASSERT(max_depth >= 0 && *max_properties >= 0); @@ -5153,7 +5133,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) { } else { PropertyAccessInfo info(this, STORE, ToType(map), name); if (info.CanAccessMonomorphic()) { - HValue* checked_literal = BuildCheckMap(literal, map); + HValue* checked_literal = Add<HCheckMaps>(literal, map); ASSERT(!info.lookup()->IsPropertyCallbacks()); store = BuildMonomorphicAccess( &info, literal, checked_literal, value, @@ -5212,11 +5192,12 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) { Handle<JSObject> boilerplate_object; if (literals_cell->IsUndefined()) { uninitialized = true; - Handle<Object> raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate( - isolate(), literals, expr->constant_elements()); - if (raw_boilerplate.is_null()) { - return Bailout(kArrayBoilerplateCreationFailed); - } + Handle<Object> raw_boilerplate; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate(), raw_boilerplate, + 
Runtime::CreateArrayLiteralBoilerplate( + isolate(), literals, expr->constant_elements()), + Bailout(kArrayBoilerplateCreationFailed)); boilerplate_object = Handle<JSObject>::cast(raw_boilerplate); AllocationSiteCreationContext creation_context(isolate()); @@ -5279,7 +5260,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) { // De-opt if elements kind changed from boilerplate_elements_kind. Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate()); - literal = Add<HCheckMaps>(literal, map, top_info()); + literal = Add<HCheckMaps>(literal, map); } // The array is expected in the bailout environment during computation @@ -5332,13 +5313,31 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) { HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object, Handle<Map> map) { BuildCheckHeapObject(object); - return Add<HCheckMaps>(object, map, top_info()); + return Add<HCheckMaps>(object, map); } HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField( PropertyAccessInfo* info, HValue* checked_object) { + // See if this is a load for an immutable property + if (checked_object->ActualValue()->IsConstant() && + info->lookup()->IsCacheable() && + info->lookup()->IsReadOnly() && info->lookup()->IsDontDelete()) { + Handle<Object> object( + HConstant::cast(checked_object->ActualValue())->handle(isolate())); + + if (object->IsJSObject()) { + LookupResult lookup(isolate()); + Handle<JSObject>::cast(object)->Lookup(info->name(), &lookup); + Handle<Object> value(lookup.GetLazyValue(), isolate()); + + if (!value->IsTheHole()) { + return New<HConstant>(value); + } + } + } + HObjectAccess access = info->access(); if (access.representation().IsDouble()) { // Load the heap number. @@ -5349,8 +5348,24 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField( // Load the double value from it. access = HObjectAccess::ForHeapNumberValue(); } + + SmallMapList* map_list = info->field_maps(); + if (map_list->length() == 0) { + return New<HLoadNamedField>(checked_object, checked_object, access); + } + + UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone()); + for (int i = 0; i < map_list->length(); ++i) { + Handle<Map> map = map_list->at(i); + maps->Add(Unique<Map>::CreateImmovable(map), zone()); + // TODO(bmeurer): Get rid of this shit! + if (map->CanTransition()) { + Map::AddDependentCompilationInfo( + map, DependentCode::kPrototypeCheckGroup, top_info()); + } + } return New<HLoadNamedField>( - checked_object, static_cast<HValue*>(NULL), access); + checked_object, checked_object, access, maps, info->field_type()); } @@ -5360,8 +5375,7 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField( HValue* value) { bool transition_to_field = info->lookup()->IsTransition(); // TODO(verwaest): Move this logic into PropertyAccessInfo. - HObjectAccess field_access = HObjectAccess::ForField( - info->map(), info->lookup(), info->name()); + HObjectAccess field_access = info->access(); HStoreNamedField *instr; if (field_access.representation().IsDouble()) { @@ -5395,6 +5409,18 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField( value, STORE_TO_INITIALIZED_ENTRY); } } else { + if (!info->field_maps()->is_empty()) { + ASSERT(field_access.representation().IsHeapObject()); + BuildCheckHeapObject(value); + value = Add<HCheckMaps>(value, info->field_maps()); + + // TODO(bmeurer): This is a dirty hack to avoid repeating the smi check + // that was already performed by the HCheckHeapObject above in the + // HStoreNamedField below. 
We should really do this right instead and + // make Crankshaft aware of Representation::HeapObject(). + field_access = field_access.WithRepresentation(Representation::Tagged()); + } + // This is a normal store. instr = New<HStoreNamedField>( checked_object->ActualValue(), field_access, value, @@ -5402,9 +5428,9 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField( } if (transition_to_field) { - HConstant* transition_constant = Add<HConstant>(info->transition()); - instr->SetTransition(transition_constant, top_info()); - instr->SetChangesFlag(kMaps); + Handle<Map> transition(info->transition()); + ASSERT(!transition->is_deprecated()); + instr->SetTransition(Add<HConstant>(transition)); } return instr; } @@ -5458,7 +5484,27 @@ bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible( } if (info->access_.offset() != access_.offset()) return false; if (info->access_.IsInobject() != access_.IsInobject()) return false; + if (IsLoad()) { + if (field_maps_.is_empty()) { + info->field_maps_.Clear(); + } else if (!info->field_maps_.is_empty()) { + for (int i = 0; i < field_maps_.length(); ++i) { + info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone()); + } + info->field_maps_.Sort(); + } + } else { + // We can only merge stores that agree on their field maps. The comparison + // below is safe, since we keep the field maps sorted. + if (field_maps_.length() != info->field_maps_.length()) return false; + for (int i = 0; i < field_maps_.length(); ++i) { + if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) { + return false; + } + } + } info->GeneralizeRepresentation(r); + info->field_type_ = info->field_type_.Combine(field_type_); return true; } @@ -5477,7 +5523,11 @@ bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) { } if (lookup_.IsField()) { + // Construct the object field access. access_ = HObjectAccess::ForField(map, &lookup_, name_); + + // Load field map for heap objects. + LoadFieldMaps(map); } else if (lookup_.IsPropertyCallbacks()) { Handle<Object> callback(lookup_.GetValueFromMap(*map), isolate()); if (!callback->IsAccessorPair()) return false; @@ -5488,16 +5538,11 @@ bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) { Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor)); if (accessor->shared()->IsApiFunction()) { CallOptimization call_optimization(accessor); - if (!call_optimization.is_simple_api_call()) return false; - CallOptimization::HolderLookup holder_lookup; - api_holder_ = call_optimization.LookupHolderOfExpectedType( - map, &holder_lookup); - switch (holder_lookup) { - case CallOptimization::kHolderNotFound: - return false; - case CallOptimization::kHolderIsReceiver: - case CallOptimization::kHolderFound: - break; + if (call_optimization.is_simple_api_call()) { + CallOptimization::HolderLookup holder_lookup; + Handle<Map> receiver_map = this->map(); + api_holder_ = call_optimization.LookupHolderOfExpectedType( + receiver_map, &holder_lookup); } } accessor_ = accessor; @@ -5509,6 +5554,56 @@ bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) { } +void HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps( + Handle<Map> map) { + // Clear any previously collected field maps/type. + field_maps_.Clear(); + field_type_ = HType::Tagged(); + + // Figure out the field type from the accessor map. + Handle<HeapType> field_type(lookup_.GetFieldTypeFromMap(*map), isolate()); + + // Collect the (stable) maps from the field type. 
+ int num_field_maps = field_type->NumClasses(); + if (num_field_maps == 0) return; + ASSERT(access_.representation().IsHeapObject()); + field_maps_.Reserve(num_field_maps, zone()); + HeapType::Iterator<Map> it = field_type->Classes(); + while (!it.Done()) { + Handle<Map> field_map = it.Current(); + if (!field_map->is_stable()) { + field_maps_.Clear(); + return; + } + field_maps_.Add(field_map, zone()); + it.Advance(); + } + field_maps_.Sort(); + ASSERT_EQ(num_field_maps, field_maps_.length()); + + // Determine field HType from field HeapType. + if (field_type->Is(HeapType::Number())) { + field_type_ = HType::HeapNumber(); + } else if (field_type->Is(HeapType::String())) { + field_type_ = HType::String(); + } else if (field_type->Is(HeapType::Boolean())) { + field_type_ = HType::Boolean(); + } else if (field_type->Is(HeapType::Array())) { + field_type_ = HType::JSArray(); + } else if (field_type->Is(HeapType::Object())) { + field_type_ = HType::JSObject(); + } else if (field_type->Is(HeapType::Null()) || + field_type->Is(HeapType::Undefined())) { + field_type_ = HType::NonPrimitive(); + } + + // Add dependency on the map that introduced the field. + Map::AddDependentCompilationInfo( + handle(lookup_.GetFieldOwnerFromMap(*map), isolate()), + DependentCode::kFieldTypeGroup, top_info()); +} + + bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() { Handle<Map> map = this->map(); @@ -5545,6 +5640,11 @@ bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() { Handle<Map> map = this->map(); map->LookupTransition(NULL, *name_, &lookup_); if (lookup_.IsTransitionToField() && map->unused_property_fields() > 0) { + // Construct the object field access. + access_ = HObjectAccess::ForField(map, &lookup_, name_); + + // Load field map for heap objects. + LoadFieldMaps(transition()); return true; } return false; @@ -5652,7 +5752,7 @@ HInstruction* HOptimizedGraphBuilder::BuildMonomorphicAccess( ? 
TryInlineGetter(info->accessor(), info->map(), ast_id, return_id) : TryInlineSetter( info->accessor(), info->map(), ast_id, return_id, value); - if (success) return NULL; + if (success || HasStackOverflow()) return NULL; } PushArgumentsFromEnvironment(argument_count); @@ -5899,7 +5999,7 @@ void HOptimizedGraphBuilder::HandleGlobalVariableAssignment( Handle<GlobalObject> global(current_info()->global_object()); Handle<PropertyCell> cell(global->GetPropertyCell(&lookup)); if (cell->type()->IsConstant()) { - Handle<Object> constant = cell->type()->AsConstant(); + Handle<Object> constant = cell->type()->AsConstant()->Value(); if (value->IsConstant()) { HConstant* c_value = HConstant::cast(value); if (!constant.is_identical_to(c_value->handle(isolate()))) { @@ -6284,8 +6384,7 @@ HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess( Handle<Map> map, PropertyAccessType access_type, KeyedAccessStoreMode store_mode) { - HCheckMaps* checked_object = Add<HCheckMaps>(object, map, top_info(), - dependency); + HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency); if (dependency) { checked_object->ClearDependsOnFlag(kElementsKind); } @@ -6764,19 +6863,9 @@ void HOptimizedGraphBuilder::VisitProperty(Property* expr) { } -HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant, - CompilationInfo* info) { - HConstant* constant_value = New<HConstant>(constant); - - if (constant->map()->CanOmitMapChecks()) { - constant->map()->AddDependentCompilationInfo( - DependentCode::kPrototypeCheckGroup, info); - return constant_value; - } - - AddInstruction(constant_value); - HCheckMaps* check = - Add<HCheckMaps>(constant_value, handle(constant->map()), info); +HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) { + HCheckMaps* check = Add<HCheckMaps>( + Add<HConstant>(constant), handle(constant->map())); check->ClearDependsOnFlag(kElementsKind); return check; } @@ -6784,14 +6873,14 @@ HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant, HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype, Handle<JSObject> holder) { - while (!prototype.is_identical_to(holder)) { - BuildConstantMapCheck(prototype, top_info()); - prototype = handle(JSObject::cast(prototype->GetPrototype())); + while (holder.is_null() || !prototype.is_identical_to(holder)) { + BuildConstantMapCheck(prototype); + Object* next_prototype = prototype->GetPrototype(); + if (next_prototype->IsNull()) return NULL; + CHECK(next_prototype->IsJSObject()); + prototype = handle(JSObject::cast(next_prototype)); } - - HInstruction* checked_object = BuildConstantMapCheck(prototype, top_info()); - if (!checked_object->IsLinked()) AddInstruction(checked_object); - return checked_object; + return BuildConstantMapCheck(prototype); } @@ -6861,13 +6950,36 @@ HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction( } +class FunctionSorter { + public: + FunctionSorter(int index = 0, int ticks = 0, int size = 0) + : index_(index), ticks_(ticks), size_(size) { } + + int index() const { return index_; } + int ticks() const { return ticks_; } + int size() const { return size_; } + + private: + int index_; + int ticks_; + int size_; +}; + + +inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) { + int diff = lhs.ticks() - rhs.ticks(); + if (diff != 0) return diff > 0; + return lhs.size() < rhs.size(); +} + + void HOptimizedGraphBuilder::HandlePolymorphicCallNamed( Call* expr, HValue* receiver, SmallMapList* types, 
Handle<String> name) { int argument_count = expr->arguments()->length() + 1; // Includes receiver. - int order[kMaxCallPolymorphism]; + FunctionSorter order[kMaxCallPolymorphism]; bool handle_smi = false; bool handled_string = false; @@ -6889,17 +7001,20 @@ void HOptimizedGraphBuilder::HandlePolymorphicCallNamed( handle_smi = true; } expr->set_target(target); - order[ordered_functions++] = i; + order[ordered_functions++] = FunctionSorter( + i, target->shared()->profiler_ticks(), InliningAstSize(target)); } } + std::sort(order, order + ordered_functions); + HBasicBlock* number_block = NULL; HBasicBlock* join = NULL; handled_string = false; int count = 0; for (int fn = 0; fn < ordered_functions; ++fn) { - int i = order[fn]; + int i = order[fn].index(); PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name); if (info.type()->Is(Type::String())) { if (handled_string) continue; @@ -7071,6 +7186,11 @@ int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) { return target_shared->inline_builtin() ? 0 : kNotInlinable; } + if (target_shared->IsApiFunction()) { + TraceInline(target, caller, "target is api function"); + return kNotInlinable; + } + // Do a quick check on source code length to avoid parsing large // inlining candidates. if (target_shared->SourceSize() > @@ -7219,6 +7339,7 @@ bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target, target_shared->set_scope_info(*target_scope_info); } target_shared->EnableDeoptimizationSupport(*target_info.code()); + target_shared->set_feedback_vector(*target_info.feedback_vector()); Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG, &target_info, target_shared); @@ -7252,8 +7373,6 @@ bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target, HConstant* context = Add<HConstant>(Handle<Context>(target->context())); inner_env->BindContext(context); - Add<HSimulate>(return_id); - current_block()->UpdateEnvironment(inner_env); HArgumentsObject* arguments_object = NULL; // If the function uses arguments object create and bind one, also copy @@ -7269,8 +7388,17 @@ bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target, } } + // Capture the state before invoking the inlined function for deopt in the + // inlined function. This simulate has no bailout-id since it's not directly + // reachable for deopt, and is only used to capture the state. If the simulate + // becomes reachable by merging, the ast id of the simulate merged into it is + // adopted. + Add<HSimulate>(BailoutId::None()); + + current_block()->UpdateEnvironment(inner_env); + HEnterInlined* enter_inlined = - Add<HEnterInlined>(target, arguments_count, function, + Add<HEnterInlined>(return_id, target, arguments_count, function, function_state()->inlining_kind(), function->scope()->arguments(), arguments_object); @@ -7596,6 +7724,8 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall( if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false; ElementsKind elements_kind = receiver_map->elements_kind(); if (!IsFastElementsKind(elements_kind)) return false; + if (receiver_map->is_observed()) return false; + ASSERT(receiver_map->is_extensible()); Drop(expr->arguments()->length()); HValue* result; @@ -7658,32 +7788,51 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall( if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false; ElementsKind elements_kind = receiver_map->elements_kind(); if (!IsFastElementsKind(elements_kind)) return false; - - HValue* op_vals[] = { - context(), - // Receiver. 
- environment()->ExpressionStackAt(expr->arguments()->length()) - }; + if (receiver_map->is_observed()) return false; + ASSERT(receiver_map->is_extensible()); + + // If there may be elements accessors in the prototype chain, the fast + // inlined version can't be used. + if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false; + // If there currently can be no elements accessors on the prototype chain, + // it doesn't mean that there won't be any later. Install a full prototype + // chain check to trap element accessors being installed on the prototype + // chain, which would cause elements to go to dictionary mode and result + // in a map change. + Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype())); + BuildCheckPrototypeMaps(prototype, Handle<JSObject>()); const int argc = expr->arguments()->length(); - // Includes receiver. - PushArgumentsFromEnvironment(argc + 1); + if (argc != 1) return false; - CallInterfaceDescriptor* descriptor = - isolate()->call_descriptor(Isolate::CallHandler); + HValue* value_to_push = Pop(); + HValue* array = Pop(); + Drop(1); // Drop function. - ArrayPushStub stub(receiver_map->elements_kind(), argc); - Handle<Code> code = stub.GetCode(isolate()); - HConstant* code_value = Add<HConstant>(code); + HInstruction* new_size = NULL; + HValue* length = NULL; - ASSERT((sizeof(op_vals) / kPointerSize) == - descriptor->environment_length()); + { + NoObservableSideEffectsScope scope(this); - HInstruction* call = New<HCallWithDescriptor>( - code_value, argc + 1, descriptor, - Vector<HValue*>(op_vals, descriptor->environment_length())); - Drop(1); // Drop function. - ast_context()->ReturnInstruction(call, expr->id()); + length = Add<HLoadNamedField>(array, static_cast<HValue*>(NULL), + HObjectAccess::ForArrayLength(elements_kind)); + + new_size = AddUncasted<HAdd>(length, graph()->GetConstant1()); + + bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE; + BuildUncheckedMonomorphicElementAccess(array, length, + value_to_push, is_array, + elements_kind, STORE, + NEVER_RETURN_HOLE, + STORE_AND_GROW_NO_TRANSITION); + + if (!ast_context()->IsEffect()) Push(new_size); + Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE); + if (!ast_context()->IsEffect()) Drop(1); + } + + ast_context()->ReturnValue(new_size); return true; } default: @@ -7763,7 +7912,7 @@ bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function, if (call_type == kCallApiFunction) { // Cannot embed a direct reference to the global proxy map // as it maybe dropped on deserialization. - CHECK(!Serializer::enabled()); + CHECK(!Serializer::enabled(isolate())); ASSERT_EQ(0, receiver_maps->length()); receiver_maps->Add(handle( function->context()->global_object()->global_receiver()->map()), @@ -7855,8 +8004,8 @@ bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function, CallInterfaceDescriptor* descriptor = isolate()->call_descriptor(Isolate::ApiFunctionCall); - CallApiFunctionStub stub(is_store, call_data_is_undefined, argc); - Handle<Code> code = stub.GetCode(isolate()); + CallApiFunctionStub stub(isolate(), is_store, call_data_is_undefined, argc); + Handle<Code> code = stub.GetCode(); HConstant* code_value = Add<HConstant>(code); ASSERT((sizeof(op_vals) / kPointerSize) == @@ -7950,7 +8099,7 @@ HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function, if (shared->strict_mode() == SLOPPY && !shared->native()) { // Cannot embed a direct reference to the global proxy // as is it dropped on deserialization. 
- CHECK(!Serializer::enabled()); + CHECK(!Serializer::enabled(isolate())); Handle<JSObject> global_receiver( target->context()->global_object()->global_receiver()); return Add<HConstant>(global_receiver); @@ -8510,6 +8659,10 @@ HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements( HValue* buffer, HValue* byte_offset, HValue* length) { Handle<Map> external_array_map( isolate()->heap()->MapForExternalArrayType(array_type)); + + // The HForceRepresentation is to prevent possible deopt on int-smi + // conversion after allocation but before the new object fields are set. + length = AddUncasted<HForceRepresentation>(length, Representation::Smi()); HValue* elements = Add<HAllocate>( Add<HConstant>(ExternalArray::kAlignedSize), @@ -8518,6 +8671,8 @@ HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements( external_array_map->instance_type()); AddStoreMapConstant(elements, external_array_map); + Add<HStoreNamedField>(elements, + HObjectAccess::ForFixedArrayLength(), length); HValue* backing_store = Add<HLoadNamedField>( buffer, static_cast<HValue*>(NULL), @@ -8535,13 +8690,10 @@ HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements( typed_array_start = external_pointer; } - Add<HStoreNamedField>(elements, HObjectAccess::ForExternalArrayExternalPointer(), typed_array_start); - Add<HStoreNamedField>(elements, - HObjectAccess::ForFixedArrayLength(), length); return elements; } @@ -8565,6 +8717,9 @@ HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray( total_size->ClearFlag(HValue::kCanOverflow); } + // The HForceRepresentation is to prevent possible deopt on int-smi + // conversion after allocation but before the new object fields are set. + length = AddUncasted<HForceRepresentation>(length, Representation::Smi()); Handle<Map> fixed_typed_array_map( isolate()->heap()->MapForFixedTypedArray(array_type)); HValue* elements = @@ -8576,6 +8731,7 @@ HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray( Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(), length); + HValue* filler = Add<HConstant>(static_cast<int32_t>(0)); { @@ -8588,8 +8744,6 @@ HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray( builder.EndBody(); } - Add<HStoreNamedField>( - elements, HObjectAccess::ForFixedArrayLength(), length); return elements; } @@ -8725,6 +8879,58 @@ void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap( } +void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength( + CallRuntime* expr) { + ASSERT(expr->arguments()->length() == 1); + CHECK_ALIVE(VisitForValue(expr->arguments()->at(0))); + HValue* buffer = Pop(); + HInstruction* result = New<HLoadNamedField>( + buffer, + static_cast<HValue*>(NULL), + HObjectAccess::ForJSArrayBufferByteLength()); + return ast_context()->ReturnInstruction(result, expr->id()); +} + + +void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength( + CallRuntime* expr) { + ASSERT(expr->arguments()->length() == 1); + CHECK_ALIVE(VisitForValue(expr->arguments()->at(0))); + HValue* buffer = Pop(); + HInstruction* result = New<HLoadNamedField>( + buffer, + static_cast<HValue*>(NULL), + HObjectAccess::ForJSArrayBufferViewByteLength()); + return ast_context()->ReturnInstruction(result, expr->id()); +} + + +void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset( + CallRuntime* expr) { + ASSERT(expr->arguments()->length() == 1); + CHECK_ALIVE(VisitForValue(expr->arguments()->at(0))); + HValue* buffer = Pop(); + HInstruction* result = New<HLoadNamedField>( + buffer, + static_cast<HValue*>(NULL), + 
HObjectAccess::ForJSArrayBufferViewByteOffset()); + return ast_context()->ReturnInstruction(result, expr->id()); +} + + +void HOptimizedGraphBuilder::GenerateTypedArrayGetLength( + CallRuntime* expr) { + ASSERT(expr->arguments()->length() == 1); + CHECK_ALIVE(VisitForValue(expr->arguments()->at(0))); + HValue* buffer = Pop(); + HInstruction* result = New<HLoadNamedField>( + buffer, + static_cast<HValue*>(NULL), + HObjectAccess::ForJSTypedArrayLength()); + return ast_context()->ReturnInstruction(result, expr->id()); +} + + void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) { ASSERT(!HasStackOverflow()); ASSERT(current_block() != NULL); @@ -9686,7 +9892,7 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) { Handle<String> name = proxy->name(); Handle<GlobalObject> global(current_info()->global_object()); LookupResult lookup(isolate()); - global->Lookup(*name, &lookup); + global->Lookup(name, &lookup); if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) { Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue())); // If the function is in new space we assume it's more likely to @@ -9760,11 +9966,22 @@ HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction( if (combined_type->Is(Type::Receiver())) { if (Token::IsEqualityOp(op)) { + // HCompareObjectEqAndBranch can only deal with object, so + // exclude numbers. + if ((left->IsConstant() && + HConstant::cast(left)->HasNumberValue()) || + (right->IsConstant() && + HConstant::cast(right)->HasNumberValue())) { + Add<HDeoptimize>("Type mismatch between feedback and constant", + Deoptimizer::SOFT); + // The caller expects a branch instruction, so make it happy. + return New<HBranch>(graph()->GetConstantTrue()); + } // Can we get away with map check and not instance type check? HValue* operand_to_check = left->block()->block_id() < right->block()->block_id() ? left : right; if (combined_type->IsClass()) { - Handle<Map> map = combined_type->AsClass(); + Handle<Map> map = combined_type->AsClass()->Map(); AddCheckMap(operand_to_check, map); HCompareObjectEqAndBranch* result = New<HCompareObjectEqAndBranch>(left, right); @@ -9787,6 +10004,17 @@ HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction( } } else if (combined_type->Is(Type::InternalizedString()) && Token::IsEqualityOp(op)) { + // If we have a constant argument, it should be consistent with the type + // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch). + if ((left->IsConstant() && + !HConstant::cast(left)->HasInternalizedStringValue()) || + (right->IsConstant() && + !HConstant::cast(right)->HasInternalizedStringValue())) { + Add<HDeoptimize>("Type mismatch between feedback and constant", + Deoptimizer::SOFT); + // The caller expects a branch instruction, so make it happy. + return New<HBranch>(graph()->GetConstantTrue()); + } BuildCheckHeapObject(left); Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING); BuildCheckHeapObject(right); @@ -10602,12 +10830,6 @@ void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) { } -void HOptimizedGraphBuilder::GenerateLog(CallRuntime* call) { - // %_Log is ignored in optimized code. - return ast_context()->ReturnValue(graph()->GetConstantUndefined()); -} - - // Fast support for StringAdd. 
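
The four intrinsic handlers added above (GenerateArrayBufferGetByteLength, GenerateArrayBufferViewGetByteLength, GenerateArrayBufferViewGetByteOffset and GenerateTypedArrayGetLength) all lower to a single HLoadNamedField on their one argument; only the HObjectAccess differs. A hypothetical common helper, not part of this commit, makes the shared shape explicit:

    // Sketch of the shape the four handlers above share; the helper name and
    // signature are assumptions, not code from this patch.
    void HOptimizedGraphBuilder::GenerateViewFieldLoad(CallRuntime* expr,
                                                       HObjectAccess access) {
      ASSERT(expr->arguments()->length() == 1);
      CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
      HValue* object = Pop();
      HInstruction* result = New<HLoadNamedField>(
          object, static_cast<HValue*>(NULL), access);
      return ast_context()->ReturnInstruction(result, expr->id());
    }
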
void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) { ASSERT_EQ(2, call->arguments()->length()); @@ -10817,6 +11039,14 @@ void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode( } +void HOptimizedGraphBuilder::GenerateDebugCallbackSupportsStepping( + CallRuntime* call) { + ASSERT(call->arguments()->length() == 1); + // Debugging is not supported in optimized code. + return ast_context()->ReturnValue(graph()->GetConstantFalse()); +} + + #undef CHECK_BAILOUT #undef CHECK_ALIVE diff --git a/deps/v8/src/hydrogen.h b/deps/v8/src/hydrogen.h index 6d81307e2..d20a81771 100644 --- a/deps/v8/src/hydrogen.h +++ b/deps/v8/src/hydrogen.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_H_ #define V8_HYDROGEN_H_ @@ -438,9 +415,11 @@ class HGraph V8_FINAL : public ZoneObject { void MarkDependsOnEmptyArrayProtoElements() { // Add map dependency if not already added. 
if (depends_on_empty_array_proto_elements_) return; - isolate()->initial_object_prototype()->map()->AddDependentCompilationInfo( + Map::AddDependentCompilationInfo( + handle(isolate()->initial_object_prototype()->map()), DependentCode::kElementsCantBeAddedGroup, info()); - isolate()->initial_array_prototype()->map()->AddDependentCompilationInfo( + Map::AddDependentCompilationInfo( + handle(isolate()->initial_array_prototype()->map()), DependentCode::kElementsCantBeAddedGroup, info()); depends_on_empty_array_proto_elements_ = true; } @@ -1319,7 +1298,6 @@ class HGraphBuilder { HBasicBlock* CreateLoopHeaderBlock(); HValue* BuildCheckHeapObject(HValue* object); - HValue* BuildCheckMap(HValue* obj, Handle<Map> map); HValue* BuildCheckString(HValue* string); HValue* BuildWrapReceiver(HValue* object, HValue* function); @@ -1802,8 +1780,7 @@ class HGraphBuilder { HValue* previous_object_size, HValue* payload); - HInstruction* BuildConstantMapCheck(Handle<JSObject> constant, - CompilationInfo* info); + HInstruction* BuildConstantMapCheck(Handle<JSObject> constant); HInstruction* BuildCheckPrototypeMaps(Handle<JSObject> prototype, Handle<JSObject> holder); @@ -2349,6 +2326,7 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor { access_type_(access_type), type_(type), name_(name), + field_type_(HType::Tagged()), access_(HObjectAccess::ForMap()) { } // Checkes whether this PropertyAccessInfo can be handled as a monomorphic @@ -2378,7 +2356,7 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor { context = context->native_context(); return handle(context->string_function()->initial_map()); } else { - return type_->AsClass(); + return type_->AsClass()->Map(); } } Type* type() const { return type_; } @@ -2393,10 +2371,10 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor { int offset; if (Accessors::IsJSObjectFieldAccessor<Type>(type_, name_, &offset)) { if (type_->Is(Type::String())) { - ASSERT(name_->Equals(isolate()->heap()->length_string())); + ASSERT(String::Equals(isolate()->factory()->length_string(), name_)); *access = HObjectAccess::ForStringLength(); } else if (type_->Is(Type::Array())) { - ASSERT(name_->Equals(isolate()->heap()->length_string())); + ASSERT(String::Equals(isolate()->factory()->length_string(), name_)); *access = HObjectAccess::ForArrayLength(map()->elements_kind()); } else { *access = HObjectAccess::ForMapAndOffset(map(), offset); @@ -2414,14 +2392,19 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor { Handle<JSFunction> accessor() { return accessor_; } Handle<Object> constant() { return constant_; } Handle<Map> transition() { return handle(lookup_.GetTransitionTarget()); } + SmallMapList* field_maps() { return &field_maps_; } + HType field_type() const { return field_type_; } HObjectAccess access() { return access_; } private: Type* ToType(Handle<Map> map) { return builder_->ToType(map); } + Zone* zone() { return builder_->zone(); } Isolate* isolate() { return lookup_.isolate(); } + CompilationInfo* top_info() { return builder_->top_info(); } CompilationInfo* current_info() { return builder_->current_info(); } bool LoadResult(Handle<Map> map); + void LoadFieldMaps(Handle<Map> map); bool LookupDescriptor(); bool LookupInPrototypes(); bool IsCompatible(PropertyAccessInfo* other); @@ -2440,6 +2423,8 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor { Handle<JSFunction> accessor_; Handle<JSObject> api_holder_; Handle<Object> constant_; + SmallMapList field_maps_; + 
HType field_type_; HObjectAccess access_; }; diff --git a/deps/v8/src/i18n.cc b/deps/v8/src/i18n.cc index d5ea77dbd..2b6b0fb07 100644 --- a/deps/v8/src/i18n.cc +++ b/deps/v8/src/i18n.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // limitations under the License. 
#include "i18n.h" @@ -57,12 +34,11 @@ bool ExtractStringSetting(Isolate* isolate, Handle<JSObject> options, const char* key, icu::UnicodeString* setting) { - Handle<String> str = isolate->factory()->NewStringFromAscii(CStrVector(key)); - MaybeObject* maybe_object = options->GetProperty(*str); - Object* object; - if (maybe_object->ToObject(&object) && object->IsString()) { + Handle<String> str = isolate->factory()->NewStringFromAsciiChecked(key); + Handle<Object> object = Object::GetProperty(options, str).ToHandleChecked(); + if (object->IsString()) { v8::String::Utf8Value utf8_string( - v8::Utils::ToLocal(Handle<String>(String::cast(object)))); + v8::Utils::ToLocal(Handle<String>::cast(object))); *setting = icu::UnicodeString::fromUTF8(*utf8_string); return true; } @@ -74,10 +50,9 @@ bool ExtractIntegerSetting(Isolate* isolate, Handle<JSObject> options, const char* key, int32_t* value) { - Handle<String> str = isolate->factory()->NewStringFromAscii(CStrVector(key)); - MaybeObject* maybe_object = options->GetProperty(*str); - Object* object; - if (maybe_object->ToObject(&object) && object->IsNumber()) { + Handle<String> str = isolate->factory()->NewStringFromAsciiChecked(key); + Handle<Object> object = Object::GetProperty(options, str).ToHandleChecked(); + if (object->IsNumber()) { object->ToInt32(value); return true; } @@ -89,10 +64,9 @@ bool ExtractBooleanSetting(Isolate* isolate, Handle<JSObject> options, const char* key, bool* value) { - Handle<String> str = isolate->factory()->NewStringFromAscii(CStrVector(key)); - MaybeObject* maybe_object = options->GetProperty(*str); - Object* object; - if (maybe_object->ToObject(&object) && object->IsBoolean()) { + Handle<String> str = isolate->factory()->NewStringFromAsciiChecked(key); + Handle<Object> object = Object::GetProperty(options, str).ToHandleChecked(); + if (object->IsBoolean()) { *value = object->BooleanValue(); return true; } @@ -152,28 +126,29 @@ void SetResolvedDateSettings(Isolate* isolate, const icu::Locale& icu_locale, icu::SimpleDateFormat* date_format, Handle<JSObject> resolved) { + Factory* factory = isolate->factory(); UErrorCode status = U_ZERO_ERROR; icu::UnicodeString pattern; date_format->toPattern(pattern); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("pattern")), - isolate->factory()->NewStringFromTwoByte( + factory->NewStringFromStaticAscii("pattern"), + factory->NewStringFromTwoByte( Vector<const uint16_t>( reinterpret_cast<const uint16_t*>(pattern.getBuffer()), - pattern.length())), + pattern.length())).ToHandleChecked(), NONE, - SLOPPY); + SLOPPY).Assert(); // Set time zone and calendar. 
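
The three Extract*Setting helpers above drop the raw MaybeObject* plumbing: the key is built with NewStringFromAsciiChecked and the property is read through the handle-based Object::GetProperty, whose MaybeHandle result is unwrapped with ToHandleChecked(). Their shared skeleton, extracted as a sketch rather than a new function in the patch, is:

    // Common read pattern behind ExtractStringSetting, ExtractIntegerSetting
    // and ExtractBooleanSetting above ('key' is the const char* setting name).
    Handle<String> str = isolate->factory()->NewStringFromAsciiChecked(key);
    Handle<Object> object = Object::GetProperty(options, str).ToHandleChecked();
    if (object->IsString()) {
      // ... type-specific conversion, as in each helper above
    }
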
const icu::Calendar* calendar = date_format->getCalendar(); const char* calendar_name = calendar->getType(); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("calendar")), - isolate->factory()->NewStringFromAscii(CStrVector(calendar_name)), + factory->NewStringFromStaticAscii("calendar"), + factory->NewStringFromAsciiChecked(calendar_name), NONE, - SLOPPY); + SLOPPY).Assert(); const icu::TimeZone& tz = calendar->getTimeZone(); icu::UnicodeString time_zone; @@ -185,21 +160,21 @@ void SetResolvedDateSettings(Isolate* isolate, if (canonical_time_zone == UNICODE_STRING_SIMPLE("Etc/GMT")) { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("timeZone")), - isolate->factory()->NewStringFromAscii(CStrVector("UTC")), + factory->NewStringFromStaticAscii("timeZone"), + factory->NewStringFromStaticAscii("UTC"), NONE, - SLOPPY); + SLOPPY).Assert(); } else { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("timeZone")), - isolate->factory()->NewStringFromTwoByte( + factory->NewStringFromStaticAscii("timeZone"), + factory->NewStringFromTwoByte( Vector<const uint16_t>( reinterpret_cast<const uint16_t*>( canonical_time_zone.getBuffer()), - canonical_time_zone.length())), + canonical_time_zone.length())).ToHandleChecked(), NONE, - SLOPPY); + SLOPPY).Assert(); } } @@ -213,17 +188,17 @@ void SetResolvedDateSettings(Isolate* isolate, const char* ns = numbering_system->getName(); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("numberingSystem")), - isolate->factory()->NewStringFromAscii(CStrVector(ns)), + factory->NewStringFromStaticAscii("numberingSystem"), + factory->NewStringFromAsciiChecked(ns), NONE, - SLOPPY); + SLOPPY).Assert(); } else { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("numberingSystem")), - isolate->factory()->undefined_value(), + factory->NewStringFromStaticAscii("numberingSystem"), + factory->undefined_value(), NONE, - SLOPPY); + SLOPPY).Assert(); } delete numbering_system; @@ -235,18 +210,18 @@ void SetResolvedDateSettings(Isolate* isolate, if (U_SUCCESS(status)) { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("locale")), - isolate->factory()->NewStringFromAscii(CStrVector(result)), + factory->NewStringFromStaticAscii("locale"), + factory->NewStringFromAsciiChecked(result), NONE, - SLOPPY); + SLOPPY).Assert(); } else { // This would never happen, since we got the locale from ICU. 
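
Throughout the SetResolved*Settings functions above and below, literal property names move from NewStringFromAscii(CStrVector(...)) to NewStringFromStaticAscii, two-byte results are unwrapped with ToHandleChecked(), and JSObject::SetProperty, which now reports failure through a MaybeHandle, has its success asserted explicitly. The recurring call shape, reflowed from the hunk:

    JSObject::SetProperty(resolved,
                          factory->NewStringFromStaticAscii("locale"),
                          factory->NewStringFromAsciiChecked(result),
                          NONE,
                          SLOPPY).Assert();
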
JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("locale")), - isolate->factory()->NewStringFromAscii(CStrVector("und")), + factory->NewStringFromStaticAscii("locale"), + factory->NewStringFromStaticAscii("und"), NONE, - SLOPPY); + SLOPPY).Assert(); } } @@ -379,30 +354,31 @@ void SetResolvedNumberSettings(Isolate* isolate, const icu::Locale& icu_locale, icu::DecimalFormat* number_format, Handle<JSObject> resolved) { + Factory* factory = isolate->factory(); icu::UnicodeString pattern; number_format->toPattern(pattern); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("pattern")), - isolate->factory()->NewStringFromTwoByte( + factory->NewStringFromStaticAscii("pattern"), + factory->NewStringFromTwoByte( Vector<const uint16_t>( reinterpret_cast<const uint16_t*>(pattern.getBuffer()), - pattern.length())), + pattern.length())).ToHandleChecked(), NONE, - SLOPPY); + SLOPPY).Assert(); // Set resolved currency code in options.currency if not empty. icu::UnicodeString currency(number_format->getCurrency()); if (!currency.isEmpty()) { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("currency")), - isolate->factory()->NewStringFromTwoByte( + factory->NewStringFromStaticAscii("currency"), + factory->NewStringFromTwoByte( Vector<const uint16_t>( reinterpret_cast<const uint16_t*>(currency.getBuffer()), - currency.length())), + currency.length())).ToHandleChecked(), NONE, - SLOPPY); + SLOPPY).Assert(); } // Ugly hack. ICU doesn't expose numbering system in any way, so we have @@ -415,78 +391,67 @@ void SetResolvedNumberSettings(Isolate* isolate, const char* ns = numbering_system->getName(); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("numberingSystem")), - isolate->factory()->NewStringFromAscii(CStrVector(ns)), + factory->NewStringFromStaticAscii("numberingSystem"), + factory->NewStringFromAsciiChecked(ns), NONE, - SLOPPY); + SLOPPY).Assert(); } else { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("numberingSystem")), - isolate->factory()->undefined_value(), + factory->NewStringFromStaticAscii("numberingSystem"), + factory->undefined_value(), NONE, - SLOPPY); + SLOPPY).Assert(); } delete numbering_system; JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("useGrouping")), - isolate->factory()->ToBoolean(number_format->isGroupingUsed()), + factory->NewStringFromStaticAscii("useGrouping"), + factory->ToBoolean(number_format->isGroupingUsed()), NONE, - SLOPPY); + SLOPPY).Assert(); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii( - CStrVector("minimumIntegerDigits")), - isolate->factory()->NewNumberFromInt( - number_format->getMinimumIntegerDigits()), + factory->NewStringFromStaticAscii("minimumIntegerDigits"), + factory->NewNumberFromInt(number_format->getMinimumIntegerDigits()), NONE, - SLOPPY); + SLOPPY).Assert(); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii( - CStrVector("minimumFractionDigits")), - isolate->factory()->NewNumberFromInt( - number_format->getMinimumFractionDigits()), + factory->NewStringFromStaticAscii("minimumFractionDigits"), + factory->NewNumberFromInt(number_format->getMinimumFractionDigits()), NONE, - SLOPPY); + SLOPPY).Assert(); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii( - CStrVector("maximumFractionDigits")), - isolate->factory()->NewNumberFromInt( - 
number_format->getMaximumFractionDigits()), + factory->NewStringFromStaticAscii("maximumFractionDigits"), + factory->NewNumberFromInt(number_format->getMaximumFractionDigits()), NONE, - SLOPPY); + SLOPPY).Assert(); - Handle<String> key = isolate->factory()->NewStringFromAscii( - CStrVector("minimumSignificantDigits")); + Handle<String> key = + factory->NewStringFromStaticAscii("minimumSignificantDigits"); if (JSReceiver::HasLocalProperty(resolved, key)) { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii( - CStrVector("minimumSignificantDigits")), - isolate->factory()->NewNumberFromInt( - number_format->getMinimumSignificantDigits()), + factory->NewStringFromStaticAscii("minimumSignificantDigits"), + factory->NewNumberFromInt(number_format->getMinimumSignificantDigits()), NONE, - SLOPPY); + SLOPPY).Assert(); } - key = isolate->factory()->NewStringFromAscii( - CStrVector("maximumSignificantDigits")); + key = factory->NewStringFromStaticAscii("maximumSignificantDigits"); if (JSReceiver::HasLocalProperty(resolved, key)) { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii( - CStrVector("maximumSignificantDigits")), - isolate->factory()->NewNumberFromInt( - number_format->getMaximumSignificantDigits()), + factory->NewStringFromStaticAscii("maximumSignificantDigits"), + factory->NewNumberFromInt(number_format->getMaximumSignificantDigits()), NONE, - SLOPPY); + SLOPPY).Assert(); } // Set the locale @@ -497,18 +462,18 @@ void SetResolvedNumberSettings(Isolate* isolate, if (U_SUCCESS(status)) { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("locale")), - isolate->factory()->NewStringFromAscii(CStrVector(result)), + factory->NewStringFromStaticAscii("locale"), + factory->NewStringFromAsciiChecked(result), NONE, - SLOPPY); + SLOPPY).Assert(); } else { // This would never happen, since we got the locale from ICU. 
JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("locale")), - isolate->factory()->NewStringFromAscii(CStrVector("und")), + factory->NewStringFromStaticAscii("locale"), + factory->NewStringFromStaticAscii("und"), NONE, - SLOPPY); + SLOPPY).Assert(); } } @@ -581,135 +546,136 @@ void SetResolvedCollatorSettings(Isolate* isolate, const icu::Locale& icu_locale, icu::Collator* collator, Handle<JSObject> resolved) { + Factory* factory = isolate->factory(); UErrorCode status = U_ZERO_ERROR; JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("numeric")), - isolate->factory()->ToBoolean( + factory->NewStringFromStaticAscii("numeric"), + factory->ToBoolean( collator->getAttribute(UCOL_NUMERIC_COLLATION, status) == UCOL_ON), NONE, - SLOPPY); + SLOPPY).Assert(); switch (collator->getAttribute(UCOL_CASE_FIRST, status)) { case UCOL_LOWER_FIRST: JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("caseFirst")), - isolate->factory()->NewStringFromAscii(CStrVector("lower")), + factory->NewStringFromStaticAscii("caseFirst"), + factory->NewStringFromStaticAscii("lower"), NONE, - SLOPPY); + SLOPPY).Assert(); break; case UCOL_UPPER_FIRST: JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("caseFirst")), - isolate->factory()->NewStringFromAscii(CStrVector("upper")), + factory->NewStringFromStaticAscii("caseFirst"), + factory->NewStringFromStaticAscii("upper"), NONE, - SLOPPY); + SLOPPY).Assert(); break; default: JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("caseFirst")), - isolate->factory()->NewStringFromAscii(CStrVector("false")), + factory->NewStringFromStaticAscii("caseFirst"), + factory->NewStringFromStaticAscii("false"), NONE, - SLOPPY); + SLOPPY).Assert(); } switch (collator->getAttribute(UCOL_STRENGTH, status)) { case UCOL_PRIMARY: { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("strength")), - isolate->factory()->NewStringFromAscii(CStrVector("primary")), + factory->NewStringFromStaticAscii("strength"), + factory->NewStringFromStaticAscii("primary"), NONE, - SLOPPY); + SLOPPY).Assert(); // case level: true + s1 -> case, s1 -> base. 
if (UCOL_ON == collator->getAttribute(UCOL_CASE_LEVEL, status)) { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("sensitivity")), - isolate->factory()->NewStringFromAscii(CStrVector("case")), + factory->NewStringFromStaticAscii("sensitivity"), + factory->NewStringFromStaticAscii("case"), NONE, - SLOPPY); + SLOPPY).Assert(); } else { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("sensitivity")), - isolate->factory()->NewStringFromAscii(CStrVector("base")), + factory->NewStringFromStaticAscii("sensitivity"), + factory->NewStringFromStaticAscii("base"), NONE, - SLOPPY); + SLOPPY).Assert(); } break; } case UCOL_SECONDARY: JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("strength")), - isolate->factory()->NewStringFromAscii(CStrVector("secondary")), + factory->NewStringFromStaticAscii("strength"), + factory->NewStringFromStaticAscii("secondary"), NONE, - SLOPPY); + SLOPPY).Assert(); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("sensitivity")), - isolate->factory()->NewStringFromAscii(CStrVector("accent")), + factory->NewStringFromStaticAscii("sensitivity"), + factory->NewStringFromStaticAscii("accent"), NONE, - SLOPPY); + SLOPPY).Assert(); break; case UCOL_TERTIARY: JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("strength")), - isolate->factory()->NewStringFromAscii(CStrVector("tertiary")), + factory->NewStringFromStaticAscii("strength"), + factory->NewStringFromStaticAscii("tertiary"), NONE, - SLOPPY); + SLOPPY).Assert(); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("sensitivity")), - isolate->factory()->NewStringFromAscii(CStrVector("variant")), + factory->NewStringFromStaticAscii("sensitivity"), + factory->NewStringFromStaticAscii("variant"), NONE, - SLOPPY); + SLOPPY).Assert(); break; case UCOL_QUATERNARY: // We shouldn't get quaternary and identical from ICU, but if we do // put them into variant. 
JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("strength")), - isolate->factory()->NewStringFromAscii(CStrVector("quaternary")), + factory->NewStringFromStaticAscii("strength"), + factory->NewStringFromStaticAscii("quaternary"), NONE, - SLOPPY); + SLOPPY).Assert(); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("sensitivity")), - isolate->factory()->NewStringFromAscii(CStrVector("variant")), + factory->NewStringFromStaticAscii("sensitivity"), + factory->NewStringFromStaticAscii("variant"), NONE, - SLOPPY); + SLOPPY).Assert(); break; default: JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("strength")), - isolate->factory()->NewStringFromAscii(CStrVector("identical")), + factory->NewStringFromStaticAscii("strength"), + factory->NewStringFromStaticAscii("identical"), NONE, - SLOPPY); + SLOPPY).Assert(); JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("sensitivity")), - isolate->factory()->NewStringFromAscii(CStrVector("variant")), + factory->NewStringFromStaticAscii("sensitivity"), + factory->NewStringFromStaticAscii("variant"), NONE, - SLOPPY); + SLOPPY).Assert(); } JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("ignorePunctuation")), - isolate->factory()->ToBoolean(collator->getAttribute( + factory->NewStringFromStaticAscii("ignorePunctuation"), + factory->ToBoolean(collator->getAttribute( UCOL_ALTERNATE_HANDLING, status) == UCOL_SHIFTED), NONE, - SLOPPY); + SLOPPY).Assert(); // Set the locale char result[ULOC_FULLNAME_CAPACITY]; @@ -719,18 +685,18 @@ void SetResolvedCollatorSettings(Isolate* isolate, if (U_SUCCESS(status)) { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("locale")), - isolate->factory()->NewStringFromAscii(CStrVector(result)), + factory->NewStringFromStaticAscii("locale"), + factory->NewStringFromAsciiChecked(result), NONE, - SLOPPY); + SLOPPY).Assert(); } else { // This would never happen, since we got the locale from ICU. JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("locale")), - isolate->factory()->NewStringFromAscii(CStrVector("und")), + factory->NewStringFromStaticAscii("locale"), + factory->NewStringFromStaticAscii("und"), NONE, - SLOPPY); + SLOPPY).Assert(); } } @@ -772,6 +738,7 @@ void SetResolvedBreakIteratorSettings(Isolate* isolate, const icu::Locale& icu_locale, icu::BreakIterator* break_iterator, Handle<JSObject> resolved) { + Factory* factory = isolate->factory(); UErrorCode status = U_ZERO_ERROR; // Set the locale @@ -782,18 +749,18 @@ void SetResolvedBreakIteratorSettings(Isolate* isolate, if (U_SUCCESS(status)) { JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("locale")), - isolate->factory()->NewStringFromAscii(CStrVector(result)), + factory->NewStringFromStaticAscii("locale"), + factory->NewStringFromAsciiChecked(result), NONE, - SLOPPY); + SLOPPY).Assert(); } else { // This would never happen, since we got the locale from ICU. 
JSObject::SetProperty( resolved, - isolate->factory()->NewStringFromAscii(CStrVector("locale")), - isolate->factory()->NewStringFromAscii(CStrVector("und")), + factory->NewStringFromStaticAscii("locale"), + factory->NewStringFromStaticAscii("und"), NONE, - SLOPPY); + SLOPPY).Assert(); } } @@ -855,7 +822,7 @@ icu::SimpleDateFormat* DateFormat::UnpackDateFormat( Isolate* isolate, Handle<JSObject> obj) { Handle<String> key = - isolate->factory()->NewStringFromAscii(CStrVector("dateFormat")); + isolate->factory()->NewStringFromStaticAscii("dateFormat"); if (JSReceiver::HasLocalProperty(obj, key)) { return reinterpret_cast<icu::SimpleDateFormat*>( obj->GetInternalField(0)); @@ -929,7 +896,7 @@ icu::DecimalFormat* NumberFormat::UnpackNumberFormat( Isolate* isolate, Handle<JSObject> obj) { Handle<String> key = - isolate->factory()->NewStringFromAscii(CStrVector("numberFormat")); + isolate->factory()->NewStringFromStaticAscii("numberFormat"); if (JSReceiver::HasLocalProperty(obj, key)) { return reinterpret_cast<icu::DecimalFormat*>(obj->GetInternalField(0)); } @@ -984,8 +951,7 @@ icu::Collator* Collator::InitializeCollator( icu::Collator* Collator::UnpackCollator(Isolate* isolate, Handle<JSObject> obj) { - Handle<String> key = - isolate->factory()->NewStringFromAscii(CStrVector("collator")); + Handle<String> key = isolate->factory()->NewStringFromStaticAscii("collator"); if (JSReceiver::HasLocalProperty(obj, key)) { return reinterpret_cast<icu::Collator*>(obj->GetInternalField(0)); } @@ -1044,7 +1010,7 @@ icu::BreakIterator* BreakIterator::InitializeBreakIterator( icu::BreakIterator* BreakIterator::UnpackBreakIterator(Isolate* isolate, Handle<JSObject> obj) { Handle<String> key = - isolate->factory()->NewStringFromAscii(CStrVector("breakIterator")); + isolate->factory()->NewStringFromStaticAscii("breakIterator"); if (JSReceiver::HasLocalProperty(obj, key)) { return reinterpret_cast<icu::BreakIterator*>(obj->GetInternalField(0)); } diff --git a/deps/v8/src/i18n.h b/deps/v8/src/i18n.h index 50beb49ba..e312322b6 100644 --- a/deps/v8/src/i18n.h +++ b/deps/v8/src/i18n.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // limitations under the License. #ifndef V8_I18N_H_ diff --git a/deps/v8/src/i18n.js b/deps/v8/src/i18n.js index 2e5485749..4fcb02b44 100644 --- a/deps/v8/src/i18n.js +++ b/deps/v8/src/i18n.js @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // limitations under the License. // ECMAScript 402 API implementation. 
@@ -232,8 +209,7 @@ var ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR = */ function addBoundMethod(obj, methodName, implementation, length) { function getter() { - if (!this || typeof this !== 'object' || - this.__initializedIntlObject === undefined) { + if (!%IsInitializedIntlObject(this)) { throw new $TypeError('Method ' + methodName + ' called on a ' + 'non-object or on a wrong type of object.'); } @@ -282,7 +258,7 @@ function addBoundMethod(obj, methodName, implementation, length) { %FunctionRemovePrototype(getter); %SetNativeFlag(getter); - $Object.defineProperty(obj.prototype, methodName, { + ObjectDefineProperty(obj.prototype, methodName, { get: getter, enumerable: false, configurable: true @@ -303,7 +279,7 @@ function supportedLocalesOf(service, locales, options) { if (options === undefined) { options = {}; } else { - options = toObject(options); + options = ToObject(options); } var matcher = options.localeMatcher; @@ -537,18 +513,6 @@ function parseExtension(extension) { /** - * Converts parameter to an Object if possible. - */ -function toObject(value) { - if (IS_NULL_OR_UNDEFINED(value)) { - throw new $TypeError('Value cannot be converted to an Object.'); - } - - return $Object(value); -} - - -/** * Populates internalOptions object with boolean key-value pairs * from extensionMap and options. * Returns filtered extension (number and date format constructors use @@ -617,15 +581,14 @@ function setOptions(inOptions, extensionMap, keyValues, getOption, outOptions) { */ function freezeArray(array) { array.forEach(function(element, index) { - $Object.defineProperty(array, index, {value: element, + ObjectDefineProperty(array, index, {value: element, configurable: false, writable: false, enumerable: true}); }); - $Object.defineProperty(array, 'length', {value: array.length, - writable: false}); - + ObjectDefineProperty(array, 'length', {value: array.length, + writable: false}); return array; } @@ -686,8 +649,8 @@ function getAvailableLocalesOf(service) { * Configurable is false by default. */ function defineWEProperty(object, property, value) { - $Object.defineProperty(object, property, - {value: value, writable: true, enumerable: true}); + ObjectDefineProperty(object, property, + {value: value, writable: true, enumerable: true}); } @@ -706,11 +669,11 @@ function addWEPropertyIfDefined(object, property, value) { * Defines a property and sets writable, enumerable and configurable to true. */ function defineWECProperty(object, property, value) { - $Object.defineProperty(object, property, - {value: value, - writable: true, - enumerable: true, - configurable: true}); + ObjectDefineProperty(object, property, + {value: value, + writable: true, + enumerable: true, + configurable: true}); } @@ -777,7 +740,7 @@ function initializeLocaleList(locales) { return freezeArray(seen); } - var o = toObject(locales); + var o = ToObject(locales); // Converts it to UInt32 (>>> is shr on 32bit integers). var len = o.length >>> 0; @@ -896,7 +859,7 @@ function BuildLanguageTagREs() { * Useful for subclassing. */ function initializeCollator(collator, locales, options) { - if (collator.hasOwnProperty('__initializedIntlObject')) { + if (%IsInitializedIntlObject(collator)) { throw new $TypeError('Trying to re-initialize Collator object.'); } @@ -949,8 +912,8 @@ function initializeCollator(collator, locales, options) { // We define all properties C++ code may produce, to prevent security // problems. 
If malicious user decides to redefine Object.prototype.locale // we can't just use plain x.locale = 'us' or in C++ Set("locale", "us"). - // Object.defineProperties will either succeed defining or throw an error. - var resolved = $Object.defineProperties({}, { + // ObjectDefineProperties will either succeed defining or throw an error. + var resolved = ObjectDefineProperties({}, { caseFirst: {writable: true}, collation: {value: internalOptions.collation, writable: true}, ignorePunctuation: {writable: true}, @@ -967,10 +930,8 @@ function initializeCollator(collator, locales, options) { resolved); // Writable, configurable and enumerable are set to false by default. - $Object.defineProperty(collator, 'collator', {value: internalCollator}); - $Object.defineProperty(collator, '__initializedIntlObject', - {value: 'collator'}); - $Object.defineProperty(collator, 'resolved', {value: resolved}); + %MarkAsInitializedIntlObjectOfType(collator, 'collator', internalCollator); + ObjectDefineProperty(collator, 'resolved', {value: resolved}); return collator; } @@ -991,7 +952,7 @@ function initializeCollator(collator, locales, options) { return new Intl.Collator(locales, options); } - return initializeCollator(toObject(this), locales, options); + return initializeCollator(ToObject(this), locales, options); }, DONT_ENUM ); @@ -1005,8 +966,7 @@ function initializeCollator(collator, locales, options) { throw new $TypeError(ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR); } - if (!this || typeof this !== 'object' || - this.__initializedIntlObject !== 'collator') { + if (!%IsInitializedIntlObjectOfType(this, 'collator')) { throw new $TypeError('resolvedOptions method called on a non-object ' + 'or on a object that is not Intl.Collator.'); } @@ -1063,7 +1023,8 @@ function initializeCollator(collator, locales, options) { * the sort order, or x comes after y in the sort order, respectively. */ function compare(collator, x, y) { - return %InternalCompare(collator.collator, $String(x), $String(y)); + return %InternalCompare(%GetImplFromInitializedIntlObject(collator), + $String(x), $String(y)); }; @@ -1104,7 +1065,7 @@ function getNumberOption(options, property, min, max, fallback) { * Useful for subclassing. */ function initializeNumberFormat(numberFormat, locales, options) { - if (numberFormat.hasOwnProperty('__initializedIntlObject')) { + if (%IsInitializedIntlObject(numberFormat)) { throw new $TypeError('Trying to re-initialize NumberFormat object.'); } @@ -1167,7 +1128,7 @@ function initializeNumberFormat(numberFormat, locales, options) { getOption, internalOptions); var requestedLocale = locale.locale + extension; - var resolved = $Object.defineProperties({}, { + var resolved = ObjectDefineProperties({}, { currency: {writable: true}, currencyDisplay: {writable: true}, locale: {writable: true}, @@ -1192,14 +1153,12 @@ function initializeNumberFormat(numberFormat, locales, options) { // We can't get information about number or currency style from ICU, so we // assume user request was fulfilled. 
if (internalOptions.style === 'currency') { - $Object.defineProperty(resolved, 'currencyDisplay', {value: currencyDisplay, - writable: true}); + ObjectDefineProperty(resolved, 'currencyDisplay', {value: currencyDisplay, + writable: true}); } - $Object.defineProperty(numberFormat, 'formatter', {value: formatter}); - $Object.defineProperty(numberFormat, 'resolved', {value: resolved}); - $Object.defineProperty(numberFormat, '__initializedIntlObject', - {value: 'numberformat'}); + %MarkAsInitializedIntlObjectOfType(numberFormat, 'numberformat', formatter); + ObjectDefineProperty(numberFormat, 'resolved', {value: resolved}); return numberFormat; } @@ -1220,7 +1179,7 @@ function initializeNumberFormat(numberFormat, locales, options) { return new Intl.NumberFormat(locales, options); } - return initializeNumberFormat(toObject(this), locales, options); + return initializeNumberFormat(ToObject(this), locales, options); }, DONT_ENUM ); @@ -1234,8 +1193,7 @@ function initializeNumberFormat(numberFormat, locales, options) { throw new $TypeError(ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR); } - if (!this || typeof this !== 'object' || - this.__initializedIntlObject !== 'numberformat') { + if (!%IsInitializedIntlObjectOfType(this, 'numberformat')) { throw new $TypeError('resolvedOptions method called on a non-object' + ' or on a object that is not Intl.NumberFormat.'); } @@ -1309,7 +1267,8 @@ function formatNumber(formatter, value) { // Spec treats -0 and +0 as 0. var number = $Number(value) + 0; - return %InternalNumberFormat(formatter.formatter, number); + return %InternalNumberFormat(%GetImplFromInitializedIntlObject(formatter), + number); } @@ -1317,7 +1276,8 @@ function formatNumber(formatter, value) { * Returns a Number that represents string value that was passed in. 
*/ function parseNumber(formatter, value) { - return %InternalNumberParse(formatter.formatter, $String(value)); + return %InternalNumberParse(%GetImplFromInitializedIntlObject(formatter), + $String(value)); } @@ -1470,13 +1430,11 @@ function appendToDateTimeObject(options, option, match, pairs) { */ function toDateTimeOptions(options, required, defaults) { if (options === undefined) { - options = null; + options = {}; } else { - options = toObject(options); + options = TO_OBJECT_INLINE(options); } - options = $Object.apply(this, [options]); - var needsDefault = true; if ((required === 'date' || required === 'any') && (options.weekday !== undefined || options.year !== undefined || @@ -1491,30 +1449,30 @@ function toDateTimeOptions(options, required, defaults) { } if (needsDefault && (defaults === 'date' || defaults === 'all')) { - $Object.defineProperty(options, 'year', {value: 'numeric', - writable: true, - enumerable: true, - configurable: true}); - $Object.defineProperty(options, 'month', {value: 'numeric', - writable: true, - enumerable: true, - configurable: true}); - $Object.defineProperty(options, 'day', {value: 'numeric', + ObjectDefineProperty(options, 'year', {value: 'numeric', + writable: true, + enumerable: true, + configurable: true}); + ObjectDefineProperty(options, 'month', {value: 'numeric', writable: true, enumerable: true, configurable: true}); + ObjectDefineProperty(options, 'day', {value: 'numeric', + writable: true, + enumerable: true, + configurable: true}); } if (needsDefault && (defaults === 'time' || defaults === 'all')) { - $Object.defineProperty(options, 'hour', {value: 'numeric', + ObjectDefineProperty(options, 'hour', {value: 'numeric', writable: true, enumerable: true, configurable: true}); - $Object.defineProperty(options, 'minute', {value: 'numeric', + ObjectDefineProperty(options, 'minute', {value: 'numeric', writable: true, enumerable: true, configurable: true}); - $Object.defineProperty(options, 'second', {value: 'numeric', + ObjectDefineProperty(options, 'second', {value: 'numeric', writable: true, enumerable: true, configurable: true}); @@ -1530,7 +1488,7 @@ function toDateTimeOptions(options, required, defaults) { */ function initializeDateTimeFormat(dateFormat, locales, options) { - if (dateFormat.hasOwnProperty('__initializedIntlObject')) { + if (%IsInitializedIntlObject(dateFormat)) { throw new $TypeError('Trying to re-initialize DateTimeFormat object.'); } @@ -1565,7 +1523,7 @@ function initializeDateTimeFormat(dateFormat, locales, options) { getOption, internalOptions); var requestedLocale = locale.locale + extension; - var resolved = $Object.defineProperties({}, { + var resolved = ObjectDefineProperties({}, { calendar: {writable: true}, day: {writable: true}, era: {writable: true}, @@ -1592,10 +1550,8 @@ function initializeDateTimeFormat(dateFormat, locales, options) { throw new $RangeError('Unsupported time zone specified ' + tz); } - $Object.defineProperty(dateFormat, 'formatter', {value: formatter}); - $Object.defineProperty(dateFormat, 'resolved', {value: resolved}); - $Object.defineProperty(dateFormat, '__initializedIntlObject', - {value: 'dateformat'}); + %MarkAsInitializedIntlObjectOfType(dateFormat, 'dateformat', formatter); + ObjectDefineProperty(dateFormat, 'resolved', {value: resolved}); return dateFormat; } @@ -1616,7 +1572,7 @@ function initializeDateTimeFormat(dateFormat, locales, options) { return new Intl.DateTimeFormat(locales, options); } - return initializeDateTimeFormat(toObject(this), locales, options); + return 
initializeDateTimeFormat(ToObject(this), locales, options); }, DONT_ENUM ); @@ -1630,8 +1586,7 @@ function initializeDateTimeFormat(dateFormat, locales, options) { throw new $TypeError(ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR); } - if (!this || typeof this !== 'object' || - this.__initializedIntlObject !== 'dateformat') { + if (!%IsInitializedIntlObjectOfType(this, 'dateformat')) { throw new $TypeError('resolvedOptions method called on a non-object or ' + 'on a object that is not Intl.DateTimeFormat.'); } @@ -1713,7 +1668,8 @@ function formatDate(formatter, dateValue) { throw new $RangeError('Provided date is not in valid range.'); } - return %InternalDateFormat(formatter.formatter, new $Date(dateMs)); + return %InternalDateFormat(%GetImplFromInitializedIntlObject(formatter), + new $Date(dateMs)); } @@ -1724,7 +1680,8 @@ function formatDate(formatter, dateValue) { * Returns undefined if date string cannot be parsed. */ function parseDate(formatter, value) { - return %InternalDateParse(formatter.formatter, $String(value)); + return %InternalDateParse(%GetImplFromInitializedIntlObject(formatter), + $String(value)); } @@ -1772,7 +1729,7 @@ function canonicalizeTimeZoneID(tzID) { * Useful for subclassing. */ function initializeBreakIterator(iterator, locales, options) { - if (iterator.hasOwnProperty('__initializedIntlObject')) { + if (%IsInitializedIntlObject(iterator)) { throw new $TypeError('Trying to re-initialize v8BreakIterator object.'); } @@ -1788,7 +1745,7 @@ function initializeBreakIterator(iterator, locales, options) { 'type', 'string', ['character', 'word', 'sentence', 'line'], 'word')); var locale = resolveLocale('breakiterator', locales, options); - var resolved = $Object.defineProperties({}, { + var resolved = ObjectDefineProperties({}, { requestedLocale: {value: locale.locale, writable: true}, type: {value: internalOptions.type, writable: true}, locale: {writable: true} @@ -1798,10 +1755,9 @@ function initializeBreakIterator(iterator, locales, options) { internalOptions, resolved); - $Object.defineProperty(iterator, 'iterator', {value: internalIterator}); - $Object.defineProperty(iterator, 'resolved', {value: resolved}); - $Object.defineProperty(iterator, '__initializedIntlObject', - {value: 'breakiterator'}); + %MarkAsInitializedIntlObjectOfType(iterator, 'breakiterator', + internalIterator); + ObjectDefineProperty(iterator, 'resolved', {value: resolved}); return iterator; } @@ -1822,7 +1778,7 @@ function initializeBreakIterator(iterator, locales, options) { return new Intl.v8BreakIterator(locales, options); } - return initializeBreakIterator(toObject(this), locales, options); + return initializeBreakIterator(ToObject(this), locales, options); }, DONT_ENUM ); @@ -1836,8 +1792,7 @@ function initializeBreakIterator(iterator, locales, options) { throw new $TypeError(ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR); } - if (!this || typeof this !== 'object' || - this.__initializedIntlObject !== 'breakiterator') { + if (!%IsInitializedIntlObjectOfType(this, 'breakiterator')) { throw new $TypeError('resolvedOptions method called on a non-object or ' + 'on a object that is not Intl.v8BreakIterator.'); } @@ -1884,7 +1839,8 @@ function initializeBreakIterator(iterator, locales, options) { * gets discarded. 
*/ function adoptText(iterator, text) { - %BreakIteratorAdoptText(iterator.iterator, $String(text)); + %BreakIteratorAdoptText(%GetImplFromInitializedIntlObject(iterator), + $String(text)); } @@ -1892,7 +1848,7 @@ function adoptText(iterator, text) { * Returns index of the first break in the string and moves current pointer. */ function first(iterator) { - return %BreakIteratorFirst(iterator.iterator); + return %BreakIteratorFirst(%GetImplFromInitializedIntlObject(iterator)); } @@ -1900,7 +1856,7 @@ function first(iterator) { * Returns the index of the next break and moves the pointer. */ function next(iterator) { - return %BreakIteratorNext(iterator.iterator); + return %BreakIteratorNext(%GetImplFromInitializedIntlObject(iterator)); } @@ -1908,7 +1864,7 @@ function next(iterator) { * Returns index of the current break. */ function current(iterator) { - return %BreakIteratorCurrent(iterator.iterator); + return %BreakIteratorCurrent(%GetImplFromInitializedIntlObject(iterator)); } @@ -1916,7 +1872,7 @@ function current(iterator) { * Returns type of the current break. */ function breakType(iterator) { - return %BreakIteratorBreakType(iterator.iterator); + return %BreakIteratorBreakType(%GetImplFromInitializedIntlObject(iterator)); } @@ -1967,7 +1923,7 @@ function cachedOrNewService(service, locales, options, defaults) { * Compares this and that, and returns less than 0, 0 or greater than 0 value. * Overrides the built-in method. */ -$Object.defineProperty($String.prototype, 'localeCompare', { +ObjectDefineProperty($String.prototype, 'localeCompare', { value: function(that) { if (%_IsConstructCall()) { throw new $TypeError(ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR); @@ -1998,7 +1954,7 @@ $Object.defineProperty($String.prototype, 'localeCompare', { * If the form is not one of "NFC", "NFD", "NFKC", or "NFKD", then throw * a RangeError Exception. */ -$Object.defineProperty($String.prototype, 'normalize', { +ObjectDefineProperty($String.prototype, 'normalize', { value: function(that) { if (%_IsConstructCall()) { throw new $TypeError(ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR); @@ -2029,7 +1985,7 @@ $Object.defineProperty($String.prototype, 'normalize', { * Formats a Number object (this) using locale and options values. * If locale or options are omitted, defaults are used. */ -$Object.defineProperty($Number.prototype, 'toLocaleString', { +ObjectDefineProperty($Number.prototype, 'toLocaleString', { value: function() { if (%_IsConstructCall()) { throw new $TypeError(ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR); @@ -2079,7 +2035,7 @@ function toLocaleDateTime(date, locales, options, required, defaults, service) { * If locale or options are omitted, defaults are used - both date and time are * present in the output. */ -$Object.defineProperty($Date.prototype, 'toLocaleString', { +ObjectDefineProperty($Date.prototype, 'toLocaleString', { value: function() { if (%_IsConstructCall()) { throw new $TypeError(ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR); @@ -2104,7 +2060,7 @@ $Object.defineProperty($Date.prototype, 'toLocaleString', { * If locale or options are omitted, defaults are used - only date is present * in the output. 
*/ -$Object.defineProperty($Date.prototype, 'toLocaleDateString', { +ObjectDefineProperty($Date.prototype, 'toLocaleDateString', { value: function() { if (%_IsConstructCall()) { throw new $TypeError(ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR); @@ -2129,7 +2085,7 @@ $Object.defineProperty($Date.prototype, 'toLocaleDateString', { * If locale or options are omitted, defaults are used - only time is present * in the output. */ -$Object.defineProperty($Date.prototype, 'toLocaleTimeString', { +ObjectDefineProperty($Date.prototype, 'toLocaleTimeString', { value: function() { if (%_IsConstructCall()) { throw new $TypeError(ORDINARY_FUNCTION_CALLED_AS_CONSTRUCTOR); diff --git a/deps/v8/src/ia32/assembler-ia32-inl.h b/deps/v8/src/ia32/assembler-ia32-inl.h index 8022f0592..97aeeae72 100644 --- a/deps/v8/src/ia32/assembler-ia32-inl.h +++ b/deps/v8/src/ia32/assembler-ia32-inl.h @@ -285,14 +285,12 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) { CPU::FlushICache(pc_, sizeof(Address)); } else if (RelocInfo::IsCodeAgeSequence(mode)) { visitor->VisitCodeAgeSequence(this); - #ifdef ENABLE_DEBUGGER_SUPPORT } else if (((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) || (RelocInfo::IsDebugBreakSlot(mode) && IsPatchedDebugBreakSlotSequence())) && isolate->debug()->has_break_points()) { visitor->VisitDebugTarget(this); -#endif } else if (IsRuntimeEntry(mode)) { visitor->VisitRuntimeEntry(this); } @@ -314,14 +312,12 @@ void RelocInfo::Visit(Heap* heap) { CPU::FlushICache(pc_, sizeof(Address)); } else if (RelocInfo::IsCodeAgeSequence(mode)) { StaticVisitor::VisitCodeAgeSequence(heap, this); -#ifdef ENABLE_DEBUGGER_SUPPORT } else if (heap->isolate()->debug()->has_break_points() && ((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) || (RelocInfo::IsDebugBreakSlot(mode) && IsPatchedDebugBreakSlotSequence()))) { StaticVisitor::VisitDebugTarget(heap, this); -#endif } else if (IsRuntimeEntry(mode)) { StaticVisitor::VisitRuntimeEntry(this); } diff --git a/deps/v8/src/ia32/assembler-ia32.cc b/deps/v8/src/ia32/assembler-ia32.cc index 3a4f590c8..7a88e7082 100644 --- a/deps/v8/src/ia32/assembler-ia32.cc +++ b/deps/v8/src/ia32/assembler-ia32.cc @@ -89,13 +89,13 @@ const char* IntelDoubleRegister::AllocationIndexToString(int index) { } -void CpuFeatures::Probe() { +void CpuFeatures::Probe(bool serializer_enabled) { ASSERT(!initialized_); ASSERT(supported_ == 0); #ifdef DEBUG initialized_ = true; #endif - if (Serializer::enabled()) { + if (serializer_enabled) { supported_ |= OS::CpuFeaturesImpliedByPlatform(); return; // No features if we might serialize. } @@ -2703,12 +2703,7 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { ASSERT(!RelocInfo::IsNone(rmode)); // Don't record external references unless the heap will be serialized. if (rmode == RelocInfo::EXTERNAL_REFERENCE) { -#ifdef DEBUG - if (!Serializer::enabled()) { - Serializer::TooLateToEnableNow(); - } -#endif - if (!Serializer::enabled() && !emit_debug_code()) { + if (!Serializer::enabled(isolate()) && !emit_debug_code()) { return; } } @@ -2717,16 +2712,17 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { } -MaybeObject* Assembler::AllocateConstantPool(Heap* heap) { +Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { // No out-of-line constant pool support. 
- UNREACHABLE(); - return NULL; + ASSERT(!FLAG_enable_ool_constant_pool); + return isolate->factory()->empty_constant_pool_array(); } void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { // No out-of-line constant pool support. - UNREACHABLE(); + ASSERT(!FLAG_enable_ool_constant_pool); + return; } diff --git a/deps/v8/src/ia32/assembler-ia32.h b/deps/v8/src/ia32/assembler-ia32.h index 27e5302db..3033db936 100644 --- a/deps/v8/src/ia32/assembler-ia32.h +++ b/deps/v8/src/ia32/assembler-ia32.h @@ -530,7 +530,7 @@ class CpuFeatures : public AllStatic { public: // Detect features of the target CPU. Set safe defaults if the serializer // is enabled (snapshots must be portable). - static void Probe(); + static void Probe(bool serializer_enabled); // Check whether a feature is supported by the target CPU. static bool IsSupported(CpuFeature f) { @@ -543,15 +543,11 @@ class CpuFeatures : public AllStatic { return Check(f, supported_); } - static bool IsFoundByRuntimeProbingOnly(CpuFeature f) { - ASSERT(initialized_); - return Check(f, found_by_runtime_probing_only_); - } - - static bool IsSafeForSnapshot(CpuFeature f) { + static bool IsSafeForSnapshot(Isolate* isolate, CpuFeature f) { return Check(f, cross_compile_) || (IsSupported(f) && - (!Serializer::enabled() || !IsFoundByRuntimeProbingOnly(f))); + !(Serializer::enabled(isolate) && + Check(f, found_by_runtime_probing_only_))); } static bool VerifyCrossCompiling() { @@ -564,6 +560,8 @@ class CpuFeatures : public AllStatic { (cross_compile_ & mask) == mask; } + static bool SupportsCrankshaft() { return IsSupported(SSE2); } + private: static bool Check(CpuFeature f, uint64_t set) { return (set & flag2set(f)) != 0; @@ -1192,7 +1190,7 @@ class Assembler : public AssemblerBase { void set_byte_at(int pos, byte value) { buffer_[pos] = value; } // Allocate a constant pool of the correct size for the generated code. - MaybeObject* AllocateConstantPool(Heap* heap); + Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate); // Generate the constant pool for the generated code. void PopulateConstantPool(ConstantPoolArray* constant_pool); diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc index 785c5fd61..b3af2b2fe 100644 --- a/deps/v8/src/ia32/builtins-ia32.cc +++ b/deps/v8/src/ia32/builtins-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -163,12 +140,10 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, Label rt_call, allocated; if (FLAG_inline_new) { Label undo_allocation; -#ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference debug_step_in_fp = ExternalReference::debug_step_in_fp_address(masm->isolate()); __ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0)); __ j(not_equal, &rt_call); -#endif // Verified that the constructor is a JSFunction. // Load the initial map and verify that it is in fact a map. @@ -540,7 +515,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, if (is_construct) { // No type feedback cell is available __ mov(ebx, masm->isolate()->factory()->undefined_value()); - CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); + CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); __ CallStub(&stub); } else { ParameterCount actual(eax); @@ -702,8 +677,8 @@ void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { - if (Serializer::enabled()) { - PlatformFeatureScope sse2(SSE2); + if (Serializer::enabled(masm->isolate())) { + PlatformFeatureScope sse2(masm->isolate(), SSE2); Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); } else { Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); @@ -949,7 +924,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { // Out of stack space. __ push(Operand(ebp, 4 * kPointerSize)); // push this __ push(eax); - __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); __ bind(&okay); // End of stack check. @@ -1252,6 +1227,33 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { } +static void ArgumentsAdaptorStackCheck(MacroAssembler* masm, + Label* stack_overflow) { + // ----------- S t a t e ------------- + // -- eax : actual number of arguments + // -- ebx : expected number of arguments + // -- edi : function (passed through to callee) + // ----------------------------------- + // Check the stack for overflow. We are not trying to catch + // interruptions (e.g. debug break and preemption) here, so the "real stack + // limit" is checked. + ExternalReference real_stack_limit = + ExternalReference::address_of_real_stack_limit(masm->isolate()); + __ mov(edx, Operand::StaticVariable(real_stack_limit)); + // Make ecx the space we have left. The stack might already be overflowed + // here which will cause ecx to become negative. + __ mov(ecx, esp); + __ sub(ecx, edx); + // Make edx the space we need for the array when it is unrolled onto the + // stack. + __ mov(edx, ebx); + __ shl(edx, kPointerSizeLog2); + // Check if the arguments will overflow the stack. + __ cmp(ecx, edx); + __ j(less_equal, stack_overflow); // Signed comparison. 
+} + + static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { __ push(ebp); __ mov(ebp, esp); @@ -1296,6 +1298,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { Label invoke, dont_adapt_arguments; __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1); + Label stack_overflow; + ArgumentsAdaptorStackCheck(masm, &stack_overflow); + Label enough, too_few; __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset)); __ cmp(eax, ebx); @@ -1370,6 +1375,14 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // ------------------------------------------- __ bind(&dont_adapt_arguments); __ jmp(edx); + + __ bind(&stack_overflow); + { + FrameScope frame(masm, StackFrame::MANUAL); + EnterArgumentsAdaptorFrame(masm); + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); + __ int3(); + } } diff --git a/deps/v8/src/ia32/code-stubs-ia32.cc b/deps/v8/src/ia32/code-stubs-ia32.cc index ab29167e9..174ebbbfa 100644 --- a/deps/v8/src/ia32/code-stubs-ia32.cc +++ b/deps/v8/src/ia32/code-stubs-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -44,7 +21,6 @@ namespace internal { void FastNewClosureStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { ebx }; descriptor->register_param_count_ = 1; @@ -55,7 +31,6 @@ void FastNewClosureStub::InitializeInterfaceDescriptor( void FastNewContextStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { edi }; descriptor->register_param_count_ = 1; @@ -65,7 +40,6 @@ void FastNewContextStub::InitializeInterfaceDescriptor( void ToNumberStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { eax }; descriptor->register_param_count_ = 1; @@ -75,7 +49,6 @@ void ToNumberStub::InitializeInterfaceDescriptor( void NumberToStringStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { eax }; descriptor->register_param_count_ = 1; @@ -86,7 +59,6 @@ void NumberToStringStub::InitializeInterfaceDescriptor( void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { eax, ebx, ecx }; descriptor->register_param_count_ = 3; @@ -98,7 +70,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { eax, ebx, ecx, edx }; descriptor->register_param_count_ = 4; @@ -109,7 +80,6 @@ void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( void CreateAllocationSiteStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { ebx, edx }; descriptor->register_param_count_ = 2; @@ -119,7 +89,6 @@ void CreateAllocationSiteStub::InitializeInterfaceDescriptor( void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { edx, ecx }; descriptor->register_param_count_ = 2; @@ -130,7 +99,6 @@ void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { edx, ecx }; descriptor->register_param_count_ = 2; @@ -141,7 +109,6 @@ void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( void RegExpConstructResultStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { ecx, ebx, eax }; descriptor->register_param_count_ = 3; @@ -152,7 +119,6 @@ void RegExpConstructResultStub::InitializeInterfaceDescriptor( void LoadFieldStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { edx }; descriptor->register_param_count_ = 1; @@ -162,7 +128,6 @@ void LoadFieldStub::InitializeInterfaceDescriptor( void KeyedLoadFieldStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { edx }; descriptor->register_param_count_ = 1; @@ -172,7 +137,6 @@ void KeyedLoadFieldStub::InitializeInterfaceDescriptor( void StringLengthStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { 
static Register registers[] = { edx, ecx }; descriptor->register_param_count_ = 2; @@ -182,7 +146,6 @@ void StringLengthStub::InitializeInterfaceDescriptor( void KeyedStringLengthStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { edx, ecx }; descriptor->register_param_count_ = 2; @@ -192,7 +155,6 @@ void KeyedStringLengthStub::InitializeInterfaceDescriptor( void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { edx, ecx, eax }; descriptor->register_param_count_ = 3; @@ -203,7 +165,6 @@ void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( void TransitionElementsKindStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { eax, ebx }; descriptor->register_param_count_ = 2; @@ -243,7 +204,6 @@ static void InitializeArrayConstructorDescriptor( static void InitializeInternalArrayConstructorDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor, int constant_stack_parameter_count) { // register state @@ -271,49 +231,42 @@ static void InitializeInternalArrayConstructorDescriptor( void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, 0); + InitializeArrayConstructorDescriptor(isolate(), descriptor, 0); } void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, 1); + InitializeArrayConstructorDescriptor(isolate(), descriptor, 1); } void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, -1); + InitializeArrayConstructorDescriptor(isolate(), descriptor, -1); } void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0); + InitializeInternalArrayConstructorDescriptor(descriptor, 0); } void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1); + InitializeInternalArrayConstructorDescriptor(descriptor, 1); } void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1); + InitializeInternalArrayConstructorDescriptor(descriptor, -1); } void CompareNilICStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { eax }; descriptor->register_param_count_ = 1; @@ -321,11 +274,10 @@ void CompareNilICStub::InitializeInterfaceDescriptor( descriptor->deoptimization_handler_ = FUNCTION_ADDR(CompareNilIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate())); } void ToBooleanStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = 
{ eax }; descriptor->register_param_count_ = 1; @@ -333,12 +285,11 @@ void ToBooleanStub::InitializeInterfaceDescriptor( descriptor->deoptimization_handler_ = FUNCTION_ADDR(ToBooleanIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate())); } void StoreGlobalStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { edx, ecx, eax }; descriptor->register_param_count_ = 3; @@ -349,7 +300,6 @@ void StoreGlobalStub::InitializeInterfaceDescriptor( void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { eax, ebx, ecx, edx }; descriptor->register_param_count_ = 4; @@ -360,19 +310,17 @@ void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( void BinaryOpICStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { edx, eax }; descriptor->register_param_count_ = 2; descriptor->register_params_ = registers; descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate())); } void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { ecx, edx, eax }; descriptor->register_param_count_ = 3; @@ -383,7 +331,6 @@ void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor( void StringAddStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { edx, eax }; descriptor->register_param_count_ = 2; @@ -482,10 +429,9 @@ void CallDescriptors::InitializeForIsolate(Isolate* isolate) { void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { // Update the static counter each time a new code stub is generated. - Isolate* isolate = masm->isolate(); - isolate->counters()->code_stubs()->Increment(); + isolate()->counters()->code_stubs()->Increment(); - CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); + CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(); int param_count = descriptor->register_param_count_; { // Call the runtime system in a fresh internal frame. 
@@ -522,9 +468,9 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { AllowExternalCallThatCantCauseGC scope(masm); __ PrepareCallCFunction(argument_count, ecx); __ mov(Operand(esp, 0 * kPointerSize), - Immediate(ExternalReference::isolate_address(masm->isolate()))); + Immediate(ExternalReference::isolate_address(isolate()))); __ CallCFunction( - ExternalReference::store_buffer_overflow_function(masm->isolate()), + ExternalReference::store_buffer_overflow_function(isolate()), argument_count); if (save_doubles_ == kSaveFPRegs) { CpuFeatureScope scope(masm, SSE2); @@ -781,7 +727,7 @@ void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm, void MathPowStub::Generate(MacroAssembler* masm) { CpuFeatureScope use_sse2(masm, SSE2); - Factory* factory = masm->isolate()->factory(); + Factory* factory = isolate()->factory(); const Register exponent = eax; const Register base = edx; const Register scratch = ecx; @@ -1010,11 +956,11 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ Cvtsi2sd(double_exponent, exponent); // Returning or bailing out. - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); if (exponent_type_ == ON_STACK) { // The arguments are still on the stack. __ bind(&call_runtime); - __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); + __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); // The stub is called from non-optimized code, which expects the result // as heap number in exponent. @@ -1031,7 +977,7 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ movsd(Operand(esp, 0 * kDoubleSize), double_base); __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent); __ CallCFunction( - ExternalReference::power_double_double_function(masm->isolate()), 4); + ExternalReference::power_double_double_function(isolate()), 4); } // Return value is in st(0) on ia32. // Store it into the (fixed) result register. 
@@ -1056,7 +1002,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) { Label miss; if (kind() == Code::KEYED_LOAD_IC) { - __ cmp(ecx, Immediate(masm->isolate()->factory()->prototype_string())); + __ cmp(ecx, Immediate(isolate()->factory()->prototype_string())); __ j(not_equal, &miss); } @@ -1152,8 +1098,6 @@ void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { - Isolate* isolate = masm->isolate(); - // esp[0] : return address // esp[4] : number of parameters (tagged) // esp[8] : receiver displacement @@ -1285,7 +1229,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { __ j(zero, &skip_parameter_map); __ mov(FieldOperand(edi, FixedArray::kMapOffset), - Immediate(isolate->factory()->sloppy_arguments_elements_map())); + Immediate(isolate()->factory()->sloppy_arguments_elements_map())); __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2)))); __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax); __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi); @@ -1306,7 +1250,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); __ add(ebx, Operand(esp, 4 * kPointerSize)); __ sub(ebx, eax); - __ mov(ecx, isolate->factory()->the_hole_value()); + __ mov(ecx, isolate()->factory()->the_hole_value()); __ mov(edx, edi); __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize)); // eax = loop variable (tagged) @@ -1341,7 +1285,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { // esp[16] = address of receiver argument // Copy arguments header and remaining slots (if there are any). __ mov(FieldOperand(edi, FixedArray::kMapOffset), - Immediate(isolate->factory()->fixed_array_map())); + Immediate(isolate()->factory()->fixed_array_map())); __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); Label arguments_loop, arguments_test; @@ -1377,8 +1321,6 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { - Isolate* isolate = masm->isolate(); - // esp[0] : return address // esp[4] : number of parameters // esp[8] : receiver displacement @@ -1449,7 +1391,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize)); __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); __ mov(FieldOperand(edi, FixedArray::kMapOffset), - Immediate(isolate->factory()->fixed_array_map())); + Immediate(isolate()->factory()->fixed_array_map())); __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); // Untag the length for the loop below. @@ -1496,14 +1438,13 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { static const int kJSRegExpOffset = 4 * kPointerSize; Label runtime; - Factory* factory = masm->isolate()->factory(); + Factory* factory = isolate()->factory(); // Ensure that a RegExp stack is allocated. 
ExternalReference address_of_regexp_stack_memory_address = - ExternalReference::address_of_regexp_stack_memory_address( - masm->isolate()); + ExternalReference::address_of_regexp_stack_memory_address(isolate()); ExternalReference address_of_regexp_stack_memory_size = - ExternalReference::address_of_regexp_stack_memory_size(masm->isolate()); + ExternalReference::address_of_regexp_stack_memory_size(isolate()); __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); __ test(ebx, ebx); __ j(zero, &runtime); @@ -1652,7 +1593,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // edx: code // ecx: encoding of subject string (1 if ASCII, 0 if two_byte); // All checks done. Now push arguments for native regexp code. - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); __ IncrementCounter(counters->regexp_entry_native(), 1); // Isolates: note we add an additional parameter here (isolate pointer). @@ -1661,7 +1602,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Argument 9: Pass current isolate address. __ mov(Operand(esp, 8 * kPointerSize), - Immediate(ExternalReference::isolate_address(masm->isolate()))); + Immediate(ExternalReference::isolate_address(isolate()))); // Argument 8: Indicate that this is a direct call from JavaScript. __ mov(Operand(esp, 7 * kPointerSize), Immediate(1)); @@ -1678,7 +1619,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Argument 5: static offsets vector buffer. __ mov(Operand(esp, 4 * kPointerSize), Immediate(ExternalReference::address_of_static_offsets_vector( - masm->isolate()))); + isolate()))); // Argument 2: Previous index. __ SmiUntag(ebx); @@ -1752,8 +1693,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // haven't created the exception yet. Handle that in the runtime system. // TODO(592): Rerunning the RegExp to get the stack overflow exception. ExternalReference pending_exception(Isolate::kPendingExceptionAddress, - masm->isolate()); - __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); + isolate()); + __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); __ mov(eax, Operand::StaticVariable(pending_exception)); __ cmp(edx, eax); __ j(equal, &runtime); @@ -1834,7 +1775,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Get the static offsets vector filled by the native regexp code. ExternalReference address_of_static_offsets_vector = - ExternalReference::address_of_static_offsets_vector(masm->isolate()); + ExternalReference::address_of_static_offsets_vector(isolate()); __ mov(ecx, Immediate(address_of_static_offsets_vector)); // ebx: last_match_info backing store (FixedArray) @@ -2000,7 +1941,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { // Check for undefined. undefined OP undefined is false even though // undefined == undefined. Label check_for_nan; - __ cmp(edx, masm->isolate()->factory()->undefined_value()); + __ cmp(edx, isolate()->factory()->undefined_value()); __ j(not_equal, &check_for_nan, Label::kNear); __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc)))); __ ret(0); @@ -2010,7 +1951,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { // Test for NaN. Compare heap numbers in a general way, // to hanlde NaNs correctly. 
__ cmp(FieldOperand(edx, HeapObject::kMapOffset), - Immediate(masm->isolate()->factory()->heap_number_map())); + Immediate(isolate()->factory()->heap_number_map())); __ j(equal, &generic_heap_number_comparison, Label::kNear); if (cc != equal) { // Call runtime on identical JSObjects. Otherwise return equal. @@ -2055,7 +1996,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { // Check if the non-smi operand is a heap number. __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), - Immediate(masm->isolate()->factory()->heap_number_map())); + Immediate(isolate()->factory()->heap_number_map())); // If heap number, handle it in the slow case. __ j(equal, &slow, Label::kNear); // Return non-equal (ebx is not zero) @@ -2284,8 +2225,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { // If we didn't have a matching function, and we didn't find the megamorph // sentinel, then we have in the slot either some other function or an // AllocationSite. Do a map check on the object in ecx. - Handle<Map> allocation_site_map = - masm->isolate()->factory()->allocation_site_map(); + Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map(); __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map)); __ j(not_equal, &miss); @@ -2332,7 +2272,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ push(edx); __ push(ebx); - CreateAllocationSiteStub create_stub; + CreateAllocationSiteStub create_stub(isolate); __ CallStub(&create_stub); __ pop(ebx); @@ -2363,12 +2303,65 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { } +static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { + // Do not transform the receiver for strict mode functions. + __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); + __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset), + 1 << SharedFunctionInfo::kStrictModeBitWithinByte); + __ j(not_equal, cont); + + // Do not transform the receiver for natives (shared already in ecx). + __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset), + 1 << SharedFunctionInfo::kNativeBitWithinByte); + __ j(not_equal, cont); +} + + +static void EmitSlowCase(Isolate* isolate, + MacroAssembler* masm, + int argc, + Label* non_function) { + // Check for function proxy. + __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); + __ j(not_equal, non_function); + __ pop(ecx); + __ push(edi); // put proxy as additional argument under return address + __ push(ecx); + __ Move(eax, Immediate(argc + 1)); + __ Move(ebx, Immediate(0)); + __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); + { + Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); + __ jmp(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). + __ bind(non_function); + __ mov(Operand(esp, (argc + 1) * kPointerSize), edi); + __ Move(eax, Immediate(argc)); + __ Move(ebx, Immediate(0)); + __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); + Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); + __ jmp(adaptor, RelocInfo::CODE_TARGET); +} + + +static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { + // Wrap the receiver and patch it back onto the stack. 
+ { FrameScope frame_scope(masm, StackFrame::INTERNAL); + __ push(edi); + __ push(eax); + __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); + __ pop(edi); + } + __ mov(Operand(esp, (argc + 1) * kPointerSize), eax); + __ jmp(cont); +} + + void CallFunctionStub::Generate(MacroAssembler* masm) { - // ebx : feedback vector - // edx : (only if ebx is not the megamorphic symbol) slot in feedback - // vector (Smi) // edi : the function to call - Isolate* isolate = masm->isolate(); Label slow, non_function, wrap, cont; if (NeedsChecks()) { @@ -2378,14 +2371,6 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // Goto slow case if we do not have a function. __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); __ j(not_equal, &slow); - - if (RecordCallTarget()) { - GenerateRecordCallTarget(masm); - // Type information was updated. Because we may call Array, which - // expects either undefined or an AllocationSite in ebx we need - // to set ebx to undefined. - __ mov(ebx, Immediate(isolate->factory()->undefined_value())); - } } // Fast-case: Just invoke the function. @@ -2393,16 +2378,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { if (CallAsMethod()) { if (NeedsChecks()) { - // Do not transform the receiver for strict mode functions. - __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); - __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset), - 1 << SharedFunctionInfo::kStrictModeBitWithinByte); - __ j(not_equal, &cont); - - // Do not transform the receiver for natives (shared already in ecx). - __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset), - 1 << SharedFunctionInfo::kNativeBitWithinByte); - __ j(not_equal, &cont); + EmitContinueIfStrictOrNative(masm, &cont); } // Load the receiver from the stack. @@ -2425,50 +2401,13 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { if (NeedsChecks()) { // Slow-case: Non-function called. __ bind(&slow); - if (RecordCallTarget()) { - // If there is a call target cache, mark it megamorphic in the - // non-function case. MegamorphicSentinel is an immortal immovable - // object (megamorphic symbol) so no write barrier is needed. - __ mov(FieldOperand(ebx, edx, times_half_pointer_size, - FixedArray::kHeaderSize), - Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); - } - // Check for function proxy. - __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); - __ j(not_equal, &non_function); - __ pop(ecx); - __ push(edi); // put proxy as additional argument under return address - __ push(ecx); - __ Move(eax, Immediate(argc_ + 1)); - __ Move(ebx, Immediate(0)); - __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); - { - Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); - __ jmp(adaptor, RelocInfo::CODE_TARGET); - } - - // CALL_NON_FUNCTION expects the non-function callee as receiver (instead - // of the original receiver from the call site). - __ bind(&non_function); - __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); - __ Move(eax, Immediate(argc_)); - __ Move(ebx, Immediate(0)); - __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); - Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); - __ jmp(adaptor, RelocInfo::CODE_TARGET); + // (non_function is bound in EmitSlowCase) + EmitSlowCase(isolate(), masm, argc_, &non_function); } if (CallAsMethod()) { __ bind(&wrap); - // Wrap the receiver and patch it back onto the stack. 
- { FrameScope frame_scope(masm, StackFrame::INTERNAL); - __ push(edi); - __ push(eax); - __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); - __ pop(edi); - } - __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax); - __ jmp(&cont); + EmitWrapCase(masm, argc_, &cont); } } @@ -2502,10 +2441,10 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize)); Handle<Map> allocation_site_map = - masm->isolate()->factory()->allocation_site_map(); + isolate()->factory()->allocation_site_map(); __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); __ j(equal, &feedback_register_initialized); - __ mov(ebx, masm->isolate()->factory()->undefined_value()); + __ mov(ebx, isolate()->factory()->undefined_value()); __ bind(&feedback_register_initialized); } @@ -2536,11 +2475,123 @@ void CallConstructStub::Generate(MacroAssembler* masm) { // Set expected number of arguments to zero (not changing eax). __ Move(ebx, Immediate(0)); Handle<Code> arguments_adaptor = - masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); + isolate()->builtins()->ArgumentsAdaptorTrampoline(); __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET); } +static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { + __ mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); + __ mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset)); + __ mov(vector, FieldOperand(vector, + SharedFunctionInfo::kFeedbackVectorOffset)); +} + + +void CallICStub::Generate(MacroAssembler* masm) { + // edi - function + // edx - slot id + Isolate* isolate = masm->isolate(); + Label extra_checks_or_miss, slow_start; + Label slow, non_function, wrap, cont; + Label have_js_function; + int argc = state_.arg_count(); + ParameterCount actual(argc); + + EmitLoadTypeFeedbackVector(masm, ebx); + + // The checks. First, does edi match the recorded monomorphic target? + __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size, + FixedArray::kHeaderSize)); + __ j(not_equal, &extra_checks_or_miss); + + __ bind(&have_js_function); + if (state_.CallAsMethod()) { + EmitContinueIfStrictOrNative(masm, &cont); + + // Load the receiver from the stack. + __ mov(eax, Operand(esp, (argc + 1) * kPointerSize)); + + __ JumpIfSmi(eax, &wrap); + + __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx); + __ j(below, &wrap); + + __ bind(&cont); + } + + __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper()); + + __ bind(&slow); + EmitSlowCase(isolate, masm, argc, &non_function); + + if (state_.CallAsMethod()) { + __ bind(&wrap); + EmitWrapCase(masm, argc, &cont); + } + + __ bind(&extra_checks_or_miss); + Label miss; + + __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, + FixedArray::kHeaderSize)); + __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); + __ j(equal, &slow_start); + __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate))); + __ j(equal, &miss); + + if (!FLAG_trace_ic) { + // We are going megamorphic, and we don't want to visit the runtime. + __ mov(FieldOperand(ebx, edx, times_half_pointer_size, + FixedArray::kHeaderSize), + Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); + __ jmp(&slow_start); + } + + // We are here because tracing is on or we are going monomorphic. + __ bind(&miss); + GenerateMiss(masm); + + // the slow case + __ bind(&slow_start); + + // Check that the function really is a JavaScript function. 
+ __ JumpIfSmi(edi, &non_function); + + // Goto slow case if we do not have a function. + __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); + __ j(not_equal, &slow); + __ jmp(&have_js_function); + + // Unreachable + __ int3(); +} + + +void CallICStub::GenerateMiss(MacroAssembler* masm) { + // Get the receiver of the function from the stack; 1 ~ return address. + __ mov(ecx, Operand(esp, (state_.arg_count() + 1) * kPointerSize)); + + { + FrameScope scope(masm, StackFrame::INTERNAL); + + // Push the receiver and the function and feedback info. + __ push(ecx); + __ push(edi); + __ push(ebx); + __ push(edx); + + // Call the entry. + ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), + masm->isolate()); + __ CallExternalReference(miss, 4); + + // Move result to edi and exit the internal frame. + __ mov(edi, eax); + } +} + + bool CEntryStub::NeedsImmovableCode() { return false; } @@ -2553,8 +2604,8 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { // It is important that the store buffer overflow stubs are generated first. ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); CreateAllocationSiteStub::GenerateAheadOfTime(isolate); - if (Serializer::enabled()) { - PlatformFeatureScope sse2(SSE2); + if (Serializer::enabled(isolate)) { + PlatformFeatureScope sse2(isolate, SSE2); BinaryOpICStub::GenerateAheadOfTime(isolate); BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); } else { @@ -2566,12 +2617,12 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { void CodeStub::GenerateFPStubs(Isolate* isolate) { if (CpuFeatures::IsSupported(SSE2)) { - CEntryStub save_doubles(1, kSaveFPRegs); + CEntryStub save_doubles(isolate, 1, kSaveFPRegs); // Stubs might already be in the snapshot, detect that and don't regenerate, // which would lead to code stub initialization state being messed up. Code* save_doubles_code; - if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { - save_doubles_code = *(save_doubles.GetCode(isolate)); + if (!save_doubles.FindCodeInCache(&save_doubles_code)) { + save_doubles_code = *(save_doubles.GetCode()); } isolate->set_fp_stubs_generated(true); } @@ -2579,17 +2630,24 @@ void CodeStub::GenerateFPStubs(Isolate* isolate) { void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { - CEntryStub stub(1, kDontSaveFPRegs); - stub.GetCode(isolate); + CEntryStub stub(isolate, 1, kDontSaveFPRegs); + stub.GetCode(); } -void CEntryStub::GenerateCore(MacroAssembler* masm, - Label* throw_normal_exception, - Label* throw_termination_exception, - bool do_gc, - bool always_allocate_scope) { - // eax: result parameter for PerformGC, if any +void CEntryStub::Generate(MacroAssembler* masm) { + // eax: number of arguments including receiver + // ebx: pointer to C function (C callee-saved) + // ebp: frame pointer (restored after C call) + // esp: stack pointer (restored after C call) + // esi: current context (C callee-saved) + // edi: JS function of the caller (C callee-saved) + + ProfileEntryHookStub::MaybeCallEntryHook(masm); + + // Enter the exit frame that transitions from JavaScript to C++. + __ EnterExitFrame(save_doubles_ == kSaveFPRegs); + // ebx: pointer to C function (C callee-saved) // ebp: frame pointer (restored after C call) // esp: stack pointer (restored after C call) @@ -2603,62 +2661,37 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ CheckStackAlignment(); } - if (do_gc) { - // Pass failure code returned from last attempt as first argument to - // PerformGC. 
No need to use PrepareCallCFunction/CallCFunction here as the - // stack alignment is known to be correct. This function takes one argument - // which is passed on the stack, and we know that the stack has been - // prepared to pass at least one argument. - __ mov(Operand(esp, 1 * kPointerSize), - Immediate(ExternalReference::isolate_address(masm->isolate()))); - __ mov(Operand(esp, 0 * kPointerSize), eax); // Result. - __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY); - } - - ExternalReference scope_depth = - ExternalReference::heap_always_allocate_scope_depth(masm->isolate()); - if (always_allocate_scope) { - __ inc(Operand::StaticVariable(scope_depth)); - } - // Call C function. __ mov(Operand(esp, 0 * kPointerSize), edi); // argc. __ mov(Operand(esp, 1 * kPointerSize), esi); // argv. __ mov(Operand(esp, 2 * kPointerSize), - Immediate(ExternalReference::isolate_address(masm->isolate()))); + Immediate(ExternalReference::isolate_address(isolate()))); __ call(ebx); // Result is in eax or edx:eax - do not destroy these registers! - if (always_allocate_scope) { - __ dec(Operand::StaticVariable(scope_depth)); - } - // Runtime functions should not return 'the hole'. Allowing it to escape may // lead to crashes in the IC code later. if (FLAG_debug_code) { Label okay; - __ cmp(eax, masm->isolate()->factory()->the_hole_value()); + __ cmp(eax, isolate()->factory()->the_hole_value()); __ j(not_equal, &okay, Label::kNear); __ int3(); __ bind(&okay); } - // Check for failure result. - Label failure_returned; - STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); - __ lea(ecx, Operand(eax, 1)); - // Lower 2 bits of ecx are 0 iff eax has failure tag. - __ test(ecx, Immediate(kFailureTagMask)); - __ j(zero, &failure_returned); + // Check result for exception sentinel. + Label exception_returned; + __ cmp(eax, isolate()->factory()->exception()); + __ j(equal, &exception_returned); ExternalReference pending_exception_address( - Isolate::kPendingExceptionAddress, masm->isolate()); + Isolate::kPendingExceptionAddress, isolate()); // Check that there is no pending exception, otherwise we - // should have returned some failure value. + // should have returned the exception sentinel. if (FLAG_debug_code) { __ push(edx); - __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); + __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); Label okay; __ cmp(edx, Operand::StaticVariable(pending_exception_address)); // Cannot use check here as it attempts to generate call into runtime. @@ -2672,96 +2705,27 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ LeaveExitFrame(save_doubles_ == kSaveFPRegs); __ ret(0); - // Handling of failure. - __ bind(&failure_returned); - - Label retry; - // If the returned exception is RETRY_AFTER_GC continue at retry label - STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); - __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); - __ j(zero, &retry, Label::kNear); + // Handling of exception. + __ bind(&exception_returned); // Retrieve the pending exception. __ mov(eax, Operand::StaticVariable(pending_exception_address)); // Clear the pending exception. - __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); + __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); __ mov(Operand::StaticVariable(pending_exception_address), edx); // Special handling of termination exceptions which are uncatchable // by javascript code. 
- __ cmp(eax, masm->isolate()->factory()->termination_exception()); - __ j(equal, throw_termination_exception); - - // Handle normal exception. - __ jmp(throw_normal_exception); - - // Retry. - __ bind(&retry); -} - - -void CEntryStub::Generate(MacroAssembler* masm) { - // eax: number of arguments including receiver - // ebx: pointer to C function (C callee-saved) - // ebp: frame pointer (restored after C call) - // esp: stack pointer (restored after C call) - // esi: current context (C callee-saved) - // edi: JS function of the caller (C callee-saved) - - ProfileEntryHookStub::MaybeCallEntryHook(masm); - - // NOTE: Invocations of builtins may return failure objects instead - // of a proper result. The builtin entry handles this by performing - // a garbage collection and retrying the builtin (twice). - - // Enter the exit frame that transitions from JavaScript to C++. - __ EnterExitFrame(save_doubles_ == kSaveFPRegs); - - // eax: result parameter for PerformGC, if any (setup below) - // ebx: pointer to builtin function (C callee-saved) - // ebp: frame pointer (restored after C call) - // esp: stack pointer (restored after C call) - // edi: number of arguments including receiver (C callee-saved) - // esi: argv pointer (C callee-saved) - - Label throw_normal_exception; Label throw_termination_exception; + __ cmp(eax, isolate()->factory()->termination_exception()); + __ j(equal, &throw_termination_exception); - // Call into the runtime system. - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - false, - false); - - // Do space-specific GC and retry runtime call. - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - true, - false); - - // Do full GC and retry runtime call one final time. - Failure* failure = Failure::InternalError(); - __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure))); - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - true, - true); - - { FrameScope scope(masm, StackFrame::MANUAL); - __ PrepareCallCFunction(0, eax); - __ CallCFunction( - ExternalReference::out_of_memory_function(masm->isolate()), 0); - } + // Handle normal exception. + __ Throw(eax); __ bind(&throw_termination_exception); __ ThrowUncatchable(eax); - - __ bind(&throw_normal_exception); - __ Throw(eax); } @@ -2785,12 +2749,11 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { __ push(ebx); // Save copies of the top frame descriptor on the stack. - ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, masm->isolate()); + ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate()); __ push(Operand::StaticVariable(c_entry_fp)); // If this is the outermost JS call, set js_entry_sp value. - ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, - masm->isolate()); + ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate()); __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); __ j(not_equal, ¬_outermost_js, Label::kNear); __ mov(Operand::StaticVariable(js_entry_sp), ebp); @@ -2807,9 +2770,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Caught exception: Store result (exception) in the pending exception // field in the JSEnv and return a failure sentinel. 
ExternalReference pending_exception(Isolate::kPendingExceptionAddress, - masm->isolate()); + isolate()); __ mov(Operand::StaticVariable(pending_exception), eax); - __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception())); + __ mov(eax, Immediate(isolate()->factory()->exception())); __ jmp(&exit); // Invoke: Link this frame into the handler chain. There's only one @@ -2818,7 +2781,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { __ PushTryHandler(StackHandler::JS_ENTRY, 0); // Clear any pending exceptions. - __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); + __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); __ mov(Operand::StaticVariable(pending_exception), edx); // Fake a receiver (NULL). @@ -2830,11 +2793,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // builtin stubs may not have been generated yet. if (is_construct) { ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, - masm->isolate()); + isolate()); __ mov(edx, Immediate(construct_entry)); } else { - ExternalReference entry(Builtins::kJSEntryTrampoline, - masm->isolate()); + ExternalReference entry(Builtins::kJSEntryTrampoline, isolate()); __ mov(edx, Immediate(entry)); } __ mov(edx, Operand(edx, 0)); // deref address @@ -2854,8 +2816,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Restore the top frame descriptor from the stack. __ pop(Operand::StaticVariable(ExternalReference( - Isolate::kCEntryFPAddress, - masm->isolate()))); + Isolate::kCEntryFPAddress, isolate()))); // Restore callee-saved registers (C calling conventions). __ pop(ebx); @@ -2968,7 +2929,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ bind(&loop); __ cmp(scratch, prototype); __ j(equal, &is_instance, Label::kNear); - Factory* factory = masm->isolate()->factory(); + Factory* factory = isolate()->factory(); __ cmp(scratch, Immediate(factory->null_value())); __ j(equal, &is_not_instance, Label::kNear); __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); @@ -3285,7 +3246,7 @@ void StringHelper::GenerateHashInit(MacroAssembler* masm, Register character, Register scratch) { // hash = (seed + character) + ((seed + character) << 10); - if (Serializer::enabled()) { + if (Serializer::enabled(masm->isolate())) { __ LoadRoot(scratch, Heap::kHashSeedRootIndex); __ SmiUntag(scratch); __ add(scratch, character); @@ -3380,7 +3341,7 @@ void SubStringStub::Generate(MacroAssembler* masm) { // Longer than original string's length or negative: unsafe arguments. __ j(above, &runtime); // Return original string. - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); __ IncrementCounter(counters->sub_string_native(), 1); __ ret(3 * kPointerSize); __ bind(¬_original_string); @@ -3402,7 +3363,7 @@ void SubStringStub::Generate(MacroAssembler* masm) { __ test(ebx, Immediate(kIsIndirectStringMask)); __ j(zero, &seq_or_external_string, Label::kNear); - Factory* factory = masm->isolate()->factory(); + Factory* factory = isolate()->factory(); __ test(ebx, Immediate(kSlicedNotConsMask)); __ j(not_zero, &sliced_string, Label::kNear); // Cons string. Check whether it is flat, then fetch first part. 
@@ -3722,7 +3683,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) { STATIC_ASSERT(EQUAL == 0); STATIC_ASSERT(kSmiTag == 0); __ Move(eax, Immediate(Smi::FromInt(EQUAL))); - __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1); + __ IncrementCounter(isolate()->counters()->string_compare_native(), 1); __ ret(2 * kPointerSize); __ bind(¬_same); @@ -3744,223 +3705,30 @@ void StringCompareStub::Generate(MacroAssembler* masm) { } -void ArrayPushStub::Generate(MacroAssembler* masm) { - int argc = arguments_count(); - - if (argc == 0) { - // Noop, return the length. - __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset)); - __ ret((argc + 1) * kPointerSize); - return; - } - - Isolate* isolate = masm->isolate(); - - if (argc != 1) { - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - Label call_builtin, attempt_to_grow_elements, with_write_barrier; - - // Get the elements array of the object. - __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset)); - - if (IsFastSmiOrObjectElementsKind(elements_kind())) { - // Check that the elements are in fast mode and writable. - __ cmp(FieldOperand(edi, HeapObject::kMapOffset), - isolate->factory()->fixed_array_map()); - __ j(not_equal, &call_builtin); - } - - // Get the array's length into eax and calculate new length. - __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset)); - STATIC_ASSERT(kSmiTagSize == 1); - STATIC_ASSERT(kSmiTag == 0); - __ add(eax, Immediate(Smi::FromInt(argc))); - - // Get the elements' length into ecx. - __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); - - // Check if we could survive without allocation. - __ cmp(eax, ecx); - - if (IsFastSmiOrObjectElementsKind(elements_kind())) { - __ j(greater, &attempt_to_grow_elements); - - // Check if value is a smi. - __ mov(ecx, Operand(esp, argc * kPointerSize)); - __ JumpIfNotSmi(ecx, &with_write_barrier); - - // Store the value. - __ mov(FieldOperand(edi, eax, times_half_pointer_size, - FixedArray::kHeaderSize - argc * kPointerSize), - ecx); - } else { - __ j(greater, &call_builtin); - - __ mov(ecx, Operand(esp, argc * kPointerSize)); - __ StoreNumberToDoubleElements( - ecx, edi, eax, ecx, xmm0, &call_builtin, true, argc * kDoubleSize); - } - - // Save new length. - __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); - __ ret((argc + 1) * kPointerSize); - - if (IsFastDoubleElementsKind(elements_kind())) { - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - __ bind(&with_write_barrier); - - if (IsFastSmiElementsKind(elements_kind())) { - if (FLAG_trace_elements_transitions) __ jmp(&call_builtin); - - __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), - isolate->factory()->heap_number_map()); - __ j(equal, &call_builtin); - - ElementsKind target_kind = IsHoleyElementsKind(elements_kind()) - ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; - __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX)); - __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset)); - __ mov(ebx, ContextOperand(ebx, Context::JS_ARRAY_MAPS_INDEX)); - const int header_size = FixedArrayBase::kHeaderSize; - // Verify that the object can be transitioned in place. 
- const int origin_offset = header_size + elements_kind() * kPointerSize; - __ mov(edi, FieldOperand(ebx, origin_offset)); - __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset)); - __ j(not_equal, &call_builtin); - - const int target_offset = header_size + target_kind * kPointerSize; - __ mov(ebx, FieldOperand(ebx, target_offset)); - ElementsTransitionGenerator::GenerateMapChangeElementsTransition( - masm, DONT_TRACK_ALLOCATION_SITE, NULL); - // Restore edi used as a scratch register for the write barrier used while - // setting the map. - __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset)); - } - - // Save new length. - __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); - - // Store the value. - __ lea(edx, FieldOperand(edi, eax, times_half_pointer_size, - FixedArray::kHeaderSize - argc * kPointerSize)); - __ mov(Operand(edx, 0), ecx); - - __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - - __ ret((argc + 1) * kPointerSize); - - __ bind(&attempt_to_grow_elements); - if (!FLAG_inline_new) { - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - __ mov(ebx, Operand(esp, argc * kPointerSize)); - // Growing elements that are SMI-only requires special handling in case the - // new element is non-Smi. For now, delegate to the builtin. - if (IsFastSmiElementsKind(elements_kind())) { - __ JumpIfNotSmi(ebx, &call_builtin); - } - - // We could be lucky and the elements array could be at the top of new-space. - // In this case we can just grow it in place by moving the allocation pointer - // up. - ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(isolate); - ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(isolate); - - const int kAllocationDelta = 4; - ASSERT(kAllocationDelta >= argc); - // Load top. - __ mov(ecx, Operand::StaticVariable(new_space_allocation_top)); - - // Check if it's the end of elements. - __ lea(edx, FieldOperand(edi, eax, times_half_pointer_size, - FixedArray::kHeaderSize - argc * kPointerSize)); - __ cmp(edx, ecx); - __ j(not_equal, &call_builtin); - __ add(ecx, Immediate(kAllocationDelta * kPointerSize)); - __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit)); - __ j(above, &call_builtin); - - // We fit and could grow elements. - __ mov(Operand::StaticVariable(new_space_allocation_top), ecx); - - // Push the argument... - __ mov(Operand(edx, 0), ebx); - // ... and fill the rest with holes. - for (int i = 1; i < kAllocationDelta; i++) { - __ mov(Operand(edx, i * kPointerSize), - isolate->factory()->the_hole_value()); - } - - if (IsFastObjectElementsKind(elements_kind())) { - // We know the elements array is in new space so we don't need the - // remembered set, but we just pushed a value onto it so we may have to tell - // the incremental marker to rescan the object that we just grew. We don't - // need to worry about the holes because they are in old space and already - // marked black. - __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET); - } - - // Restore receiver to edx as finish sequence assumes it's here. - __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); - - // Increment element's and array's sizes. 
- __ add(FieldOperand(edi, FixedArray::kLengthOffset), - Immediate(Smi::FromInt(kAllocationDelta))); - - // NOTE: This only happen in new-space, where we don't care about the - // black-byte-count on pages. Otherwise we should update that too if the - // object is black. - - __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); - __ ret((argc + 1) * kPointerSize); - - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); -} - - void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- edx : left // -- eax : right // -- esp[0] : return address // ----------------------------------- - Isolate* isolate = masm->isolate(); // Load ecx with the allocation site. We stick an undefined dummy value here // and replace it with the real allocation site later when we instantiate this // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). - __ mov(ecx, handle(isolate->heap()->undefined_value())); + __ mov(ecx, handle(isolate()->heap()->undefined_value())); // Make sure that we actually patched the allocation site. if (FLAG_debug_code) { __ test(ecx, Immediate(kSmiTagMask)); __ Assert(not_equal, kExpectedAllocationSite); __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), - isolate->factory()->allocation_site_map()); + isolate()->factory()->allocation_site_map()); __ Assert(equal, kExpectedAllocationSite); } // Tail call into the stub that handles binary operations with allocation // sites. - BinaryOpWithAllocationSiteStub stub(state_); + BinaryOpWithAllocationSiteStub stub(isolate(), state_); __ TailCallStub(&stub); } @@ -4015,7 +3783,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { Label done, left, left_smi, right_smi; __ JumpIfSmi(eax, &right_smi, Label::kNear); __ cmp(FieldOperand(eax, HeapObject::kMapOffset), - masm->isolate()->factory()->heap_number_map()); + isolate()->factory()->heap_number_map()); __ j(not_equal, &maybe_undefined1, Label::kNear); __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); __ jmp(&left, Label::kNear); @@ -4027,7 +3795,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { __ bind(&left); __ JumpIfSmi(edx, &left_smi, Label::kNear); __ cmp(FieldOperand(edx, HeapObject::kMapOffset), - masm->isolate()->factory()->heap_number_map()); + isolate()->factory()->heap_number_map()); __ j(not_equal, &maybe_undefined2, Label::kNear); __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); __ jmp(&done); @@ -4057,22 +3825,22 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { __ JumpIfSmi(ecx, &generic_stub, Label::kNear); __ cmp(FieldOperand(eax, HeapObject::kMapOffset), - masm->isolate()->factory()->heap_number_map()); + isolate()->factory()->heap_number_map()); __ j(not_equal, &maybe_undefined1, Label::kNear); __ cmp(FieldOperand(edx, HeapObject::kMapOffset), - masm->isolate()->factory()->heap_number_map()); + isolate()->factory()->heap_number_map()); __ j(not_equal, &maybe_undefined2, Label::kNear); } __ bind(&unordered); __ bind(&generic_stub); - ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, + ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC, CompareIC::GENERIC); - __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); + __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); __ bind(&maybe_undefined1); if (Token::IsOrderedRelationalCompareOp(op_)) { - __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value())); + __ cmp(eax, 
Immediate(isolate()->factory()->undefined_value())); __ j(not_equal, &miss); __ JumpIfSmi(edx, &unordered); __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx); @@ -4082,7 +3850,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { __ bind(&maybe_undefined2); if (Token::IsOrderedRelationalCompareOp(op_)) { - __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value())); + __ cmp(edx, Immediate(isolate()->factory()->undefined_value())); __ j(equal, &unordered); } @@ -4317,7 +4085,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) { { // Call the runtime system in a fresh internal frame. ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss), - masm->isolate()); + isolate()); FrameScope scope(masm, StackFrame::INTERNAL); __ push(edx); // Preserve edx and eax. __ push(eax); @@ -4391,7 +4159,8 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, __ bind(&good); } - NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); + NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0, + NEGATIVE_LOOKUP); __ push(Immediate(Handle<Object>(name))); __ push(Immediate(name->Hash())); __ CallStub(&stub); @@ -4447,7 +4216,8 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, __ j(equal, done); } - NameDictionaryLookupStub stub(elements, r1, r0, POSITIVE_LOOKUP); + NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0, + POSITIVE_LOOKUP); __ push(name); __ mov(r0, FieldOperand(name, Name::kHashFieldOffset)); __ shr(r0, Name::kHashShift); @@ -4507,7 +4277,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { index_, times_pointer_size, kElementsStartOffset - kHeapObjectTag)); - __ cmp(scratch, masm->isolate()->factory()->undefined_value()); + __ cmp(scratch, isolate()->factory()->undefined_value()); __ j(equal, &not_in_dictionary); // Stop if found the property. @@ -4550,11 +4320,11 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( Isolate* isolate) { - StoreBufferOverflowStub stub(kDontSaveFPRegs); - stub.GetCode(isolate); - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { - StoreBufferOverflowStub stub2(kSaveFPRegs); - stub2.GetCode(isolate); + StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs); + stub.GetCode(); + if (CpuFeatures::IsSafeForSnapshot(isolate, SSE2)) { + StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); + stub2.GetCode(); } } @@ -4653,12 +4423,11 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { __ mov(Operand(esp, 0 * kPointerSize), regs_.object()); __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot. __ mov(Operand(esp, 2 * kPointerSize), - Immediate(ExternalReference::isolate_address(masm->isolate()))); + Immediate(ExternalReference::isolate_address(isolate()))); AllowExternalCallThatCantCauseGC scope(masm); __ CallCFunction( - ExternalReference::incremental_marking_record_write_function( - masm->isolate()), + ExternalReference::incremental_marking_record_write_function(isolate()), argument_count); regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); @@ -4845,8 +4614,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { - CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); - __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); + CEntryStub ces(isolate(), 1, fp_registers_ ? 
kSaveFPRegs : kDontSaveFPRegs); + __ call(ces.GetCode(), RelocInfo::CODE_TARGET); int parameter_count_offset = StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; __ mov(ebx, MemOperand(ebp, parameter_count_offset)); @@ -4862,7 +4631,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { if (masm->isolate()->function_entry_hook() != NULL) { - ProfileEntryHookStub stub; + ProfileEntryHookStub stub(masm->isolate()); masm->CallStub(&stub); } } @@ -4886,8 +4655,8 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) { __ push(eax); // Call the entry hook. - ASSERT(masm->isolate()->function_entry_hook() != NULL); - __ call(FUNCTION_ADDR(masm->isolate()->function_entry_hook()), + ASSERT(isolate()->function_entry_hook() != NULL); + __ call(FUNCTION_ADDR(isolate()->function_entry_hook()), RelocInfo::RUNTIME_ENTRY); __ add(esp, Immediate(2 * kPointerSize)); @@ -4904,7 +4673,8 @@ template<class T> static void CreateArrayDispatch(MacroAssembler* masm, AllocationSiteOverrideMode mode) { if (mode == DISABLE_ALLOCATION_SITES) { - T stub(GetInitialFastElementsKind(), + T stub(masm->isolate(), + GetInitialFastElementsKind(), mode); __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { @@ -4915,7 +4685,7 @@ static void CreateArrayDispatch(MacroAssembler* masm, ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); __ cmp(edx, kind); __ j(not_equal, &next); - T stub(kind); + T stub(masm->isolate(), kind); __ TailCallStub(&stub); __ bind(&next); } @@ -4959,12 +4729,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, ElementsKind initial = GetInitialFastElementsKind(); ElementsKind holey_initial = GetHoleyElementsKind(initial); - ArraySingleArgumentConstructorStub stub_holey(holey_initial, + ArraySingleArgumentConstructorStub stub_holey(masm->isolate(), + holey_initial, DISABLE_ALLOCATION_SITES); __ TailCallStub(&stub_holey); __ bind(&normal_sequence); - ArraySingleArgumentConstructorStub stub(initial, + ArraySingleArgumentConstructorStub stub(masm->isolate(), + initial, DISABLE_ALLOCATION_SITES); __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { @@ -4994,7 +4766,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); __ cmp(edx, kind); __ j(not_equal, &next); - ArraySingleArgumentConstructorStub stub(kind); + ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); __ TailCallStub(&stub); __ bind(&next); } @@ -5013,11 +4785,11 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { TERMINAL_FAST_ELEMENTS_KIND); for (int i = 0; i <= to_index; ++i) { ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); - T stub(kind); - stub.GetCode(isolate); + T stub(isolate, kind); + stub.GetCode(); if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { - T stub1(kind, DISABLE_ALLOCATION_SITES); - stub1.GetCode(isolate); + T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); + stub1.GetCode(); } } } @@ -5038,12 +4810,12 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; for (int i = 0; i < 2; i++) { // For internal arrays we only need a few things - InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); - stubh1.GetCode(isolate); - InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); - stubh2.GetCode(isolate); - InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); - 
stubh3.GetCode(isolate); + InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); + stubh1.GetCode(); + InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); + stubh2.GetCode(); + InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]); + stubh3.GetCode(); } } @@ -5103,7 +4875,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { Label no_info; // If the feedback vector is the undefined value call an array constructor // that doesn't use AllocationSites. - __ cmp(ebx, masm->isolate()->factory()->undefined_value()); + __ cmp(ebx, isolate()->factory()->undefined_value()); __ j(equal, &no_info); // Only look at the lower 16 bits of the transition info. @@ -5125,7 +4897,7 @@ void InternalArrayConstructorStub::GenerateCase( __ test(eax, eax); __ j(not_zero, &not_zero_case); - InternalArrayNoArgumentConstructorStub stub0(kind); + InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); __ TailCallStub(&stub0); __ bind(&not_zero_case); @@ -5140,16 +4912,16 @@ void InternalArrayConstructorStub::GenerateCase( __ j(zero, &normal_sequence); InternalArraySingleArgumentConstructorStub - stub1_holey(GetHoleyElementsKind(kind)); + stub1_holey(isolate(), GetHoleyElementsKind(kind)); __ TailCallStub(&stub1_holey); } __ bind(&normal_sequence); - InternalArraySingleArgumentConstructorStub stub1(kind); + InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); __ TailCallStub(&stub1); __ bind(&not_one_case); - InternalArrayNArgumentsConstructorStub stubN(kind); + InternalArrayNArgumentsConstructorStub stubN(isolate(), kind); __ TailCallStub(&stubN); } @@ -5242,8 +5014,6 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { STATIC_ASSERT(FCA::kHolderIndex == 0); STATIC_ASSERT(FCA::kArgsLength == 7); - Isolate* isolate = masm->isolate(); - __ pop(return_address); // context save @@ -5260,9 +5030,9 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { Register scratch = call_data; if (!call_data_undefined) { // return value - __ push(Immediate(isolate->factory()->undefined_value())); + __ push(Immediate(isolate()->factory()->undefined_value())); // return value default - __ push(Immediate(isolate->factory()->undefined_value())); + __ push(Immediate(isolate()->factory()->undefined_value())); } else { // return value __ push(scratch); // return value default __ push(scratch); } // isolate - __ push(Immediate(reinterpret_cast<int>(isolate))); + __ push(Immediate(reinterpret_cast<int>(isolate()))); // holder __ push(holder); @@ -5305,7 +5075,8 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { __ lea(scratch, ApiParameterOperand(2)); __ mov(ApiParameterOperand(0), scratch); - Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); + ExternalReference thunk_ref = + ExternalReference::invoke_function_callback(isolate()); Operand context_restore_operand(ebp, (2 + FCA::kContextSaveIndex) * kPointerSize); @@ -5318,7 +5089,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { } Operand return_value_operand(ebp, return_value_offset * kPointerSize); __ CallApiFunctionAndReturn(api_function_address, - thunk_address, + thunk_ref, ApiParameterOperand(1), argc + FCA::kArgsLength + 1, return_value_operand, @@ -5353,10 +5124,11 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) { __ add(scratch, Immediate(kPointerSize)); __ mov(ApiParameterOperand(1), scratch); // arguments pointer. 
- Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); + ExternalReference thunk_ref = + ExternalReference::invoke_accessor_getter_callback(isolate()); __ CallApiFunctionAndReturn(api_function_address, - thunk_address, + thunk_ref, ApiParameterOperand(2), kStackSpace, Operand(ebp, 7 * kPointerSize), diff --git a/deps/v8/src/ia32/code-stubs-ia32.h b/deps/v8/src/ia32/code-stubs-ia32.h index cf20a11c6..1d55ec3c0 100644 --- a/deps/v8/src/ia32/code-stubs-ia32.h +++ b/deps/v8/src/ia32/code-stubs-ia32.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IA32_CODE_STUBS_IA32_H_ #define V8_IA32_CODE_STUBS_IA32_H_ @@ -42,9 +19,10 @@ void ArrayNativeCode(MacroAssembler* masm, class StoreBufferOverflowStub: public PlatformCodeStub { public: - explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp) - : save_doubles_(save_fp) { - ASSERT(CpuFeatures::IsSafeForSnapshot(SSE2) || save_fp == kDontSaveFPRegs); + StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp) + : PlatformCodeStub(isolate), save_doubles_(save_fp) { + ASSERT(CpuFeatures::IsSafeForSnapshot(isolate, SSE2) || + save_fp == kDontSaveFPRegs); } void Generate(MacroAssembler* masm); @@ -92,7 +70,7 @@ class StringHelper : public AllStatic { class SubStringStub: public PlatformCodeStub { public: - SubStringStub() {} + explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {} private: Major MajorKey() { return SubString; } @@ -104,7 +82,7 @@ class SubStringStub: public PlatformCodeStub { class StringCompareStub: public PlatformCodeStub { public: - StringCompareStub() { } + explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { } // Compares two flat ASCII strings and returns result in eax. 
static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm, @@ -142,11 +120,13 @@ class NameDictionaryLookupStub: public PlatformCodeStub { public: enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP }; - NameDictionaryLookupStub(Register dictionary, + NameDictionaryLookupStub(Isolate* isolate, + Register dictionary, Register result, Register index, LookupMode mode) - : dictionary_(dictionary), result_(result), index_(index), mode_(mode) { } + : PlatformCodeStub(isolate), + dictionary_(dictionary), result_(result), index_(index), mode_(mode) { } void Generate(MacroAssembler* masm); @@ -202,12 +182,14 @@ class NameDictionaryLookupStub: public PlatformCodeStub { class RecordWriteStub: public PlatformCodeStub { public: - RecordWriteStub(Register object, + RecordWriteStub(Isolate* isolate, + Register object, Register value, Register address, RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) - : object_(object), + : PlatformCodeStub(isolate), + object_(object), value_(value), address_(address), remembered_set_action_(remembered_set_action), @@ -215,7 +197,8 @@ class RecordWriteStub: public PlatformCodeStub { regs_(object, // An input reg. address, // An input reg. value) { // One scratch reg. - ASSERT(CpuFeatures::IsSafeForSnapshot(SSE2) || fp_mode == kDontSaveFPRegs); + ASSERT(CpuFeatures::IsSafeForSnapshot(isolate, SSE2) || + fp_mode == kDontSaveFPRegs); } enum Mode { diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc index 350a8fb22..19b66aee3 100644 --- a/deps/v8/src/ia32/codegen-ia32.cc +++ b/deps/v8/src/ia32/codegen-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -1073,37 +1050,33 @@ void MathExpGenerator::EmitMathExp(MacroAssembler* masm, #undef __ -static byte* GetNoCodeAgeSequence(uint32_t* length) { - static bool initialized = false; - static byte sequence[kNoCodeAgeSequenceLength]; - *length = kNoCodeAgeSequenceLength; - if (!initialized) { - // The sequence of instructions that is patched out for aging code is the - // following boilerplate stack-building prologue that is found both in - // FUNCTION and OPTIMIZED_FUNCTION code: - CodePatcher patcher(sequence, kNoCodeAgeSequenceLength); - patcher.masm()->push(ebp); - patcher.masm()->mov(ebp, esp); - patcher.masm()->push(esi); - patcher.masm()->push(edi); - initialized = true; - } - return sequence; +CodeAgingHelper::CodeAgingHelper() { + ASSERT(young_sequence_.length() == kNoCodeAgeSequenceLength); + CodePatcher patcher(young_sequence_.start(), young_sequence_.length()); + patcher.masm()->push(ebp); + patcher.masm()->mov(ebp, esp); + patcher.masm()->push(esi); + patcher.masm()->push(edi); +} + + +#ifdef DEBUG +bool CodeAgingHelper::IsOld(byte* candidate) const { + return *candidate == kCallOpcode; } +#endif -bool Code::IsYoungSequence(byte* sequence) { - uint32_t young_length; - byte* young_sequence = GetNoCodeAgeSequence(&young_length); - bool result = (!memcmp(sequence, young_sequence, young_length)); - ASSERT(result || *sequence == kCallOpcode); +bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) { + bool result = isolate->code_aging_helper()->IsYoung(sequence); + ASSERT(result || isolate->code_aging_helper()->IsOld(sequence)); return result; } -void Code::GetCodeAgeAndParity(byte* sequence, Age* age, +void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age, MarkingParity* parity) { - if (IsYoungSequence(sequence)) { + if (IsYoungSequence(isolate, sequence)) { *age = kNoAgeCodeAge; *parity = NO_MARKING_PARITY; } else { @@ -1120,10 +1093,9 @@ void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence, Code::Age age, MarkingParity parity) { - uint32_t young_length; - byte* young_sequence = GetNoCodeAgeSequence(&young_length); + uint32_t young_length = isolate->code_aging_helper()->young_sequence_length(); if (age == kNoAgeCodeAge) { - CopyBytes(sequence, young_sequence, young_length); + isolate->code_aging_helper()->CopyYoungSequenceTo(sequence); CPU::FlushICache(sequence, young_length); } else { Code* stub = GetCodeAgeStub(isolate, age, parity); diff --git a/deps/v8/src/ia32/codegen-ia32.h b/deps/v8/src/ia32/codegen-ia32.h index 2ef804307..eda92b0a8 100644 --- a/deps/v8/src/ia32/codegen-ia32.h +++ b/deps/v8/src/ia32/codegen-ia32.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IA32_CODEGEN_IA32_H_ #define V8_IA32_CODEGEN_IA32_H_ diff --git a/deps/v8/src/ia32/cpu-ia32.cc b/deps/v8/src/ia32/cpu-ia32.cc index 5fb04fc72..7f87a624e 100644 --- a/deps/v8/src/ia32/cpu-ia32.cc +++ b/deps/v8/src/ia32/cpu-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // CPU specific code for ia32 independent of OS goes here. @@ -41,16 +18,6 @@ namespace v8 { namespace internal { -void CPU::SetUp() { - CpuFeatures::Probe(); -} - - -bool CPU::SupportsCrankshaft() { - return CpuFeatures::IsSupported(SSE2); -} - - void CPU::FlushICache(void* start, size_t size) { // No need to flush the instruction cache on Intel. On Intel instruction // cache flushing is only necessary when multiple cores running the same diff --git a/deps/v8/src/ia32/debug-ia32.cc b/deps/v8/src/ia32/debug-ia32.cc index 42284ec75..e7a7b6058 100644 --- a/deps/v8/src/ia32/debug-ia32.cc +++ b/deps/v8/src/ia32/debug-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. 
All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -36,8 +13,6 @@ namespace v8 { namespace internal { -#ifdef ENABLE_DEBUGGER_SUPPORT - bool BreakLocationIterator::IsDebugBreakAtReturn() { return Debug::IsDebugBreakAtReturn(rinfo()); } @@ -50,7 +25,7 @@ void BreakLocationIterator::SetDebugBreakAtReturn() { ASSERT(Assembler::kJSReturnSequenceLength >= Assembler::kCallInstructionLength); rinfo()->PatchCodeWithCall( - debug_info_->GetIsolate()->debug()->debug_break_return()->entry(), + debug_info_->GetIsolate()->builtins()->Return_DebugBreak()->entry(), Assembler::kJSReturnSequenceLength - Assembler::kCallInstructionLength); } @@ -81,7 +56,7 @@ void BreakLocationIterator::SetDebugBreakAtSlot() { ASSERT(IsDebugBreakSlot()); Isolate* isolate = debug_info_->GetIsolate(); rinfo()->PatchCodeWithCall( - isolate->debug()->debug_break_slot()->entry(), + isolate->builtins()->Slot_DebugBreak()->entry(), Assembler::kDebugBreakSlotLength - Assembler::kCallInstructionLength); } @@ -141,7 +116,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, __ Move(eax, Immediate(0)); // No arguments. __ mov(ebx, Immediate(ExternalReference::debug_break(masm->isolate()))); - CEntryStub ceb(1); + CEntryStub ceb(masm->isolate(), 1); __ CallStub(&ceb); // Automatically find register that could be used after register restore. @@ -197,6 +172,17 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, } +void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) { + // Register state for CallICStub + // ----------- S t a t e ------------- + // -- edx : type feedback slot (smi) + // -- edi : function + // ----------------------------------- + Generate_DebugBreakCallHelper(masm, edx.bit() | edi.bit(), + 0, false); +} + + void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) { // Register state for IC load call (from ic-ia32.cc). 
// ----------- S t a t e ------------- @@ -250,15 +236,6 @@ void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) { } -void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) { - // Register state for keyed IC call call (from ic-ia32.cc) - // ----------- S t a t e ------------- - // -- ecx: name - // ----------------------------------- - Generate_DebugBreakCallHelper(masm, ecx.bit(), 0, false); -} - - void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) { // Register state just before return from JS function (from codegen-ia32.cc). // ----------- S t a t e ------------- @@ -277,18 +254,6 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) { } -void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) { - // Register state for CallFunctionStub (from code-stubs-ia32.cc). - // ----------- S t a t e ------------- - // -- ebx: feedback array - // -- edx: slot in feedback array - // -- edi: function - // ----------------------------------- - Generate_DebugBreakCallHelper(masm, ebx.bit() | edx.bit() | edi.bit(), - 0, false); -} - - void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) { // Register state for CallConstructStub (from code-stubs-ia32.cc). // eax is the actual number of arguments not encoded as a smi see comment @@ -369,8 +334,6 @@ const bool Debug::kFrameDropperSupported = true; #undef __ -#endif // ENABLE_DEBUGGER_SUPPORT - } } // namespace v8::internal #endif // V8_TARGET_ARCH_IA32 diff --git a/deps/v8/src/ia32/deoptimizer-ia32.cc b/deps/v8/src/ia32/deoptimizer-ia32.cc index 711cdf86f..6db045079 100644 --- a/deps/v8/src/ia32/deoptimizer-ia32.cc +++ b/deps/v8/src/ia32/deoptimizer-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" diff --git a/deps/v8/src/ia32/disasm-ia32.cc b/deps/v8/src/ia32/disasm-ia32.cc index e50a78e34..721b0bb42 100644 --- a/deps/v8/src/ia32/disasm-ia32.cc +++ b/deps/v8/src/ia32/disasm-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <assert.h> #include <stdio.h> diff --git a/deps/v8/src/ia32/frames-ia32.cc b/deps/v8/src/ia32/frames-ia32.cc index 9859ebb0e..55671ba8f 100644 --- a/deps/v8/src/ia32/frames-ia32.cc +++ b/deps/v8/src/ia32/frames-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/ia32/frames-ia32.h b/deps/v8/src/ia32/frames-ia32.h index 2d6145eea..fcfabda8b 100644 --- a/deps/v8/src/ia32/frames-ia32.h +++ b/deps/v8/src/ia32/frames-ia32.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IA32_FRAMES_IA32_H_ #define V8_IA32_FRAMES_IA32_H_ diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc index 70a968e8a..63c3ee601 100644 --- a/deps/v8/src/ia32/full-codegen-ia32.cc +++ b/deps/v8/src/ia32/full-codegen-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -74,7 +51,7 @@ class JumpPatchSite BASE_EMBEDDED { void EmitPatchInfo() { if (patch_site_.is_bound()) { int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); - ASSERT(is_int8(delta_to_patch_site)); + ASSERT(is_uint8(delta_to_patch_site)); __ test(eax, Immediate(delta_to_patch_site)); #ifdef DEBUG info_emitted_ = true; @@ -106,12 +83,14 @@ static void EmitStackCheck(MacroAssembler* masm_, Register scratch = esp) { Label ok; Isolate* isolate = masm_->isolate(); - ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(isolate); ASSERT(scratch.is(esp) == (pointers == 0)); + ExternalReference stack_limit; if (pointers != 0) { __ mov(scratch, esp); __ sub(scratch, Immediate(pointers * kPointerSize)); + stack_limit = ExternalReference::address_of_real_stack_limit(isolate); + } else { + stack_limit = ExternalReference::address_of_stack_limit(isolate); } __ cmp(scratch, Operand::StaticVariable(stack_limit)); __ j(above_equal, &ok, Label::kNear); @@ -138,8 +117,6 @@ void FullCodeGenerator::Generate() { handler_table_ = isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); - InitializeFeedbackVector(); - profiling_counter_ = isolate()->factory()->NewCell( Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); SetFunctionPosition(function()); @@ -227,7 +204,7 @@ void FullCodeGenerator::Generate() { __ Push(info->scope()->GetScopeInfo()); __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2); } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); + FastNewContextStub stub(isolate(), heap_slots); __ CallStub(&stub); } else { __ push(edi); @@ -289,7 +266,7 @@ void FullCodeGenerator::Generate() { } else { type = ArgumentsAccessStub::NEW_SLOPPY_FAST; } - ArgumentsAccessStub stub(type); + ArgumentsAccessStub stub(isolate(), type); __ CallStub(&stub); SetVar(arguments, eax, ebx, edx); @@ -434,12 +411,10 @@ void FullCodeGenerator::EmitReturnSequence() { int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize; __ Ret(arguments_bytes, ecx); -#ifdef ENABLE_DEBUGGER_SUPPORT // Check that the size of the code used for returning is large enough // for the debugger's requirements. 
ASSERT(Assembler::kJSReturnSequenceLength <= masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); -#endif info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); } } @@ -1135,15 +1110,10 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { Label non_proxy; __ bind(&fixed_array); - Handle<Object> feedback = Handle<Object>( - Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), - isolate()); - StoreFeedbackVectorSlot(slot, feedback); - // No need for a write barrier, we are storing a Smi in the feedback vector. __ LoadHeapObject(ebx, FeedbackVector()); __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)), - Immediate(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker))); + Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate()))); __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object @@ -1298,7 +1268,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, !pretenure && scope()->is_function_scope() && info->num_literals() == 0) { - FastNewClosureStub stub(info->strict_mode(), info->is_generator()); + FastNewClosureStub stub(isolate(), + info->strict_mode(), + info->is_generator()); __ mov(ebx, Immediate(info)); __ CallStub(&stub); } else { @@ -1617,7 +1589,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags; int properties_count = constant_properties->length() / 2; - if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() || + if (expr->may_store_doubles() || expr->depth() > 1 || + Serializer::enabled(isolate()) || flags != ObjectLiteral::kFastElements || properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); @@ -1632,7 +1605,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index()))); __ mov(ecx, Immediate(constant_properties)); __ mov(edx, Immediate(Smi::FromInt(flags))); - FastCloneShallowObjectStub stub(properties_count); + FastCloneShallowObjectStub stub(isolate(), properties_count); __ CallStub(&stub); } @@ -1767,11 +1740,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index()))); __ mov(ecx, Immediate(constant_elements)); FastCloneShallowArrayStub stub( + isolate(), FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, allocation_site_mode, length); __ CallStub(&stub); - } else if (expr->depth() > 1 || Serializer::enabled() || + } else if (expr->depth() > 1 || Serializer::enabled(isolate()) || length > FastCloneShallowArrayStub::kMaximumClonedLength) { __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset)); @@ -1795,7 +1769,10 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset)); __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index()))); __ mov(ecx, Immediate(constant_elements)); - FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); + FastCloneShallowArrayStub stub(isolate(), + mode, + allocation_site_mode, + length); __ CallStub(&stub); } @@ -1832,7 +1809,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { } else { // Store the subexpression value in the array's elements. 
__ mov(ecx, Immediate(Smi::FromInt(i))); - StoreArrayLiteralElementStub stub; + StoreArrayLiteralElementStub stub(isolate()); __ CallStub(&stub); } @@ -1849,7 +1826,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { void FullCodeGenerator::VisitAssignment(Assignment* expr) { - ASSERT(expr->target()->IsValidLeftHandSide()); + ASSERT(expr->target()->IsValidReferenceExpression()); Comment cmnt(masm_, "[ Assignment"); @@ -2081,7 +2058,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) { CallIC(ic, TypeFeedbackId::None()); __ mov(edi, eax); __ mov(Operand(esp, 2 * kPointerSize), edi); - CallFunctionStub stub(1, CALL_AS_METHOD); + CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); __ CallStub(&stub); __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); @@ -2225,7 +2202,7 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) { Label gc_required; Label allocated; - Handle<Map> map(isolate()->native_context()->generator_result_map()); + Handle<Map> map(isolate()->native_context()->iterator_result_map()); __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT); __ jmp(&allocated); @@ -2288,8 +2265,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, __ bind(&stub_call); __ mov(eax, ecx); - BinaryOpICStub stub(op, mode); - CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); + BinaryOpICStub stub(isolate(), op, mode); + CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); patch_site.EmitPatchInfo(); __ jmp(&done, Label::kNear); @@ -2371,16 +2348,16 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op, OverwriteMode mode) { __ pop(edx); - BinaryOpICStub stub(op, mode); + BinaryOpICStub stub(isolate(), op, mode); JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. - CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); + CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); patch_site.EmitPatchInfo(); context()->Plug(eax); } void FullCodeGenerator::EmitAssignment(Expression* expr) { - ASSERT(expr->IsValidLeftHandSide()); + ASSERT(expr->IsValidReferenceExpression()); // Left-hand side can only be a property, a global or a (parameter or local) // slot. @@ -2580,17 +2557,15 @@ void FullCodeGenerator::CallIC(Handle<Code> code, } - - // Code common for calls using the IC. -void FullCodeGenerator::EmitCallWithIC(Call* expr) { +void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { Expression* callee = expr->expression(); - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); - CallFunctionFlags flags; + CallIC::CallType call_type = callee->IsVariableProxy() + ? CallIC::FUNCTION + : CallIC::METHOD; // Get the target function. - if (callee->IsVariableProxy()) { + if (call_type == CallIC::FUNCTION) { { StackValueContext context(this); EmitVariableLoad(callee->AsVariableProxy()); PrepareForBailout(callee, NO_REGISTERS); @@ -2598,7 +2573,6 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) { // Push undefined as receiver. This is patched in the method prologue if it // is a sloppy mode method. __ push(Immediate(isolate()->factory()->undefined_value())); - flags = NO_CALL_FUNCTION_FLAGS; } else { // Load the function from the receiver. ASSERT(callee->IsProperty()); @@ -2608,39 +2582,19 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) { // Push the target function under the receiver. __ push(Operand(esp, 0)); __ mov(Operand(esp, kPointerSize), eax); - flags = CALL_AS_METHOD; } - // Load the arguments. 
- { PreservePositionScope scope(masm()->positions_recorder()); - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } - } - - // Record source position of the IC call. - SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, flags); - __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); - RecordJSReturnSite(expr); - - // Restore context register. - __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, eax); + EmitCall(expr, call_type); } // Code common for calls using the IC. -void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, - Expression* key) { +void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, + Expression* key) { // Load the key. VisitForAccumulatorValue(key); Expression* callee = expr->expression(); - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); // Load the function from the receiver. ASSERT(callee->IsProperty()); @@ -2654,29 +2608,12 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, __ push(Operand(esp, 0)); __ mov(Operand(esp, kPointerSize), eax); - // Load the arguments. - { PreservePositionScope scope(masm()->positions_recorder()); - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } - } - - // Record source position of the IC call. - SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, CALL_AS_METHOD); - __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); - RecordJSReturnSite(expr); - - // Restore context register. - __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, eax); + EmitCall(expr, CallIC::METHOD); } -void FullCodeGenerator::EmitCallWithStub(Call* expr) { - // Code common for calls using the call stub. +void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) { + // Load the arguments. ZoneList<Expression*>* args = expr->arguments(); int arg_count = args->length(); { PreservePositionScope scope(masm()->positions_recorder()); @@ -2684,23 +2621,22 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { VisitForStackValue(args->at(i)); } } - // Record source position for debugger. - SetSourcePosition(expr->position()); - Handle<Object> uninitialized = - TypeFeedbackInfo::UninitializedSentinel(isolate()); - StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); - __ LoadHeapObject(ebx, FeedbackVector()); - __ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot()))); - - // Record call targets in unoptimized code. - CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); + // Record source position of the IC call. + SetSourcePosition(expr->position()); + Handle<Code> ic = CallIC::initialize_stub( + isolate(), arg_count, call_type); + __ Move(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot()))); __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); + // Don't assign a type feedback id to the IC, since type feedback is provided + // by the vector above. + CallIC(ic); RecordJSReturnSite(expr); + // Restore context register. __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); + context()->DropAndPlug(1, eax); } @@ -2764,7 +2700,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { } // Record source position for debugger. 
SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); + CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize)); __ CallStub(&stub); RecordJSReturnSite(expr); @@ -2773,7 +2709,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { context()->DropAndPlug(1, eax); } else if (call_type == Call::GLOBAL_CALL) { - EmitCallWithIC(expr); + EmitCallWithLoadIC(expr); } else if (call_type == Call::LOOKUP_SLOT_CALL) { // Call to a lookup slot (dynamically introduced variable). @@ -2809,7 +2745,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { // The receiver is either the global receiver or an object found by // LoadContextSlot. - EmitCallWithStub(expr); + EmitCall(expr); } else if (call_type == Call::PROPERTY_CALL) { Property* property = callee->AsProperty(); @@ -2817,9 +2753,9 @@ void FullCodeGenerator::VisitCall(Call* expr) { VisitForStackValue(property->obj()); } if (property->key()->IsPropertyName()) { - EmitCallWithIC(expr); + EmitCallWithLoadIC(expr); } else { - EmitKeyedCallWithIC(expr, property->key()); + EmitKeyedCallWithLoadIC(expr, property->key()); } } else { @@ -2830,7 +2766,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { } __ push(Immediate(isolate()->factory()->undefined_value())); // Emit function call. - EmitCallWithStub(expr); + EmitCall(expr); } #ifdef DEBUG @@ -2867,12 +2803,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { __ mov(edi, Operand(esp, arg_count * kPointerSize)); // Record call targets in unoptimized code. - Handle<Object> uninitialized = - TypeFeedbackInfo::UninitializedSentinel(isolate()); - StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); if (FLAG_pretenuring_call_new) { - StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), - isolate()->factory()->NewAllocationSite()); + EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); ASSERT(expr->AllocationSiteFeedbackSlot() == expr->CallNewFeedbackSlot() + 1); } @@ -2880,8 +2812,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { __ LoadHeapObject(ebx, FeedbackVector()); __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot()))); - CallConstructStub stub(RECORD_CALL_TARGET); - __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); + CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); + __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); PrepareForBailoutForId(expr->ReturnId(), TOS_REG); context()->Plug(eax); } @@ -3257,7 +3189,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) { VisitForAccumulatorValue(args->at(0)); __ mov(edx, eax); __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters()))); - ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); + ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); __ CallStub(&stub); context()->Plug(eax); } @@ -3347,30 +3279,9 @@ void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { } -void FullCodeGenerator::EmitLog(CallRuntime* expr) { - // Conditionally generate a log call. - // Args: - // 0 (literal string): The type of logging (corresponds to the flags). - // This is used to determine whether or not to generate the log call. - // 1 (string): Format string. Access the string at argument index 2 - // with '%2s' (see Logger::LogRuntime for all the formats). - // 2 (array): Arguments to the format string. 
- ZoneList<Expression*>* args = expr->arguments(); - ASSERT_EQ(args->length(), 3); - if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) { - VisitForStackValue(args->at(1)); - VisitForStackValue(args->at(2)); - __ CallRuntime(Runtime::kHiddenLog, 2); - } - // Finally, we're expected to leave a value on the top of the stack. - __ mov(eax, isolate()->factory()->undefined_value()); - context()->Plug(eax); -} - - void FullCodeGenerator::EmitSubString(CallRuntime* expr) { // Load the arguments on the stack and call the stub. - SubStringStub stub; + SubStringStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 3); VisitForStackValue(args->at(0)); @@ -3383,7 +3294,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) { void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { // Load the arguments on the stack and call the stub. - RegExpExecStub stub; + RegExpExecStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 4); VisitForStackValue(args->at(0)); @@ -3536,10 +3447,10 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { VisitForStackValue(args->at(1)); if (CpuFeatures::IsSupported(SSE2)) { - MathPowStub stub(MathPowStub::ON_STACK); + MathPowStub stub(isolate(), MathPowStub::ON_STACK); __ CallStub(&stub); } else { - __ CallRuntime(Runtime::kMath_pow, 2); + __ CallRuntime(Runtime::kHiddenMathPowSlow, 2); } context()->Plug(eax); } @@ -3581,7 +3492,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { // Load the argument into eax and call the stub. VisitForAccumulatorValue(args->at(0)); - NumberToStringStub stub; + NumberToStringStub stub(isolate()); __ CallStub(&stub); context()->Plug(eax); } @@ -3707,7 +3618,7 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { VisitForAccumulatorValue(args->at(1)); __ pop(edx); - StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED); + StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); __ CallStub(&stub); context()->Plug(eax); } @@ -3720,32 +3631,12 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - StringCompareStub stub; + StringCompareStub stub(isolate()); __ CallStub(&stub); context()->Plug(eax); } -void FullCodeGenerator::EmitMathLog(CallRuntime* expr) { - // Load the argument on the stack and call the runtime function. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT(args->length() == 1); - VisitForStackValue(args->at(0)); - __ CallRuntime(Runtime::kMath_log, 1); - context()->Plug(eax); -} - - -void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) { - // Load the argument on the stack and call the runtime function. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT(args->length() == 1); - VisitForStackValue(args->at(0)); - __ CallRuntime(Runtime::kMath_sqrt, 1); - context()->Plug(eax); -} - - void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() >= 2); @@ -3780,7 +3671,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { // Load the arguments on the stack and call the stub. 
- RegExpConstructResultStub stub; + RegExpConstructResultStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 3); VisitForStackValue(args->at(0)); @@ -4178,7 +4069,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { // Record source position of the IC call. SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); + CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize)); __ CallStub(&stub); // Restore context register. @@ -4315,7 +4206,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { - ASSERT(expr->expression()->IsValidLeftHandSide()); + ASSERT(expr->expression()->IsValidReferenceExpression()); Comment cmnt(masm_, "[ CountOperation"); SetSourcePosition(expr->position()); @@ -4407,7 +4298,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { __ jmp(&stub_call, Label::kNear); __ bind(&slow); } - ToNumberStub convert_stub; + ToNumberStub convert_stub(isolate()); __ CallStub(&convert_stub); // Save result for postfix expressions. @@ -4437,8 +4328,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { __ bind(&stub_call); __ mov(edx, eax); __ mov(eax, Immediate(Smi::FromInt(1))); - BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE); - CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); + BinaryOpICStub stub(isolate(), expr->binary_op(), NO_OVERWRITE); + CallIC(stub.GetCode(), expr->CountBinOpFeedbackId()); patch_site.EmitPatchInfo(); __ bind(&done); @@ -4554,12 +4445,13 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, } PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); - if (check->Equals(isolate()->heap()->number_string())) { + Factory* factory = isolate()->factory(); + if (String::Equals(check, factory->number_string())) { __ JumpIfSmi(eax, if_true); __ cmp(FieldOperand(eax, HeapObject::kMapOffset), isolate()->factory()->heap_number_map()); Split(equal, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->string_string())) { + } else if (String::Equals(check, factory->string_string())) { __ JumpIfSmi(eax, if_false); __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx); __ j(above_equal, if_false); @@ -4567,20 +4459,20 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ test_b(FieldOperand(edx, Map::kBitFieldOffset), 1 << Map::kIsUndetectable); Split(zero, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->symbol_string())) { + } else if (String::Equals(check, factory->symbol_string())) { __ JumpIfSmi(eax, if_false); __ CmpObjectType(eax, SYMBOL_TYPE, edx); Split(equal, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->boolean_string())) { + } else if (String::Equals(check, factory->boolean_string())) { __ cmp(eax, isolate()->factory()->true_value()); __ j(equal, if_true); __ cmp(eax, isolate()->factory()->false_value()); Split(equal, if_true, if_false, fall_through); } else if (FLAG_harmony_typeof && - check->Equals(isolate()->heap()->null_string())) { + String::Equals(check, factory->null_string())) { __ cmp(eax, isolate()->factory()->null_value()); Split(equal, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->undefined_string())) { + } else if (String::Equals(check, factory->undefined_string())) { __ cmp(eax, 
isolate()->factory()->undefined_value()); __ j(equal, if_true); __ JumpIfSmi(eax, if_false); @@ -4589,14 +4481,14 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset)); __ test(ecx, Immediate(1 << Map::kIsUndetectable)); Split(not_zero, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->function_string())) { + } else if (String::Equals(check, factory->function_string())) { __ JumpIfSmi(eax, if_false); STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx); __ j(equal, if_true); __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE); Split(equal, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->object_string())) { + } else if (String::Equals(check, factory->object_string())) { __ JumpIfSmi(eax, if_false); if (!FLAG_harmony_typeof) { __ cmp(eax, isolate()->factory()->null_value()); @@ -4647,7 +4539,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { case Token::INSTANCEOF: { VisitForStackValue(expr->right()); - InstanceofStub stub(InstanceofStub::kNoFlags); + InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); __ CallStub(&stub); PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); __ test(eax, eax); diff --git a/deps/v8/src/ia32/ic-ia32.cc b/deps/v8/src/ia32/ic-ia32.cc index c2be7da1a..52aa0ea11 100644 --- a/deps/v8/src/ia32/ic-ia32.cc +++ b/deps/v8/src/ia32/ic-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -1282,7 +1259,7 @@ void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) { Address delta_address = test_instruction_address + 1; // The delta to the start of the map check instruction and the // condition code uses at the patched jump. 
- int8_t delta = *reinterpret_cast<int8_t*>(delta_address); + uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address); if (FLAG_trace_ic) { PrintF("[ patching ic at %p, test=%p, delta=%d\n", address, test_instruction_address, delta); diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc index 0dbe3da13..2872d4dc0 100644 --- a/deps/v8/src/ia32/lithium-codegen-ia32.cc +++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -41,9 +18,10 @@ namespace v8 { namespace internal { -static SaveFPRegsMode GetSaveFPRegsMode() { +static SaveFPRegsMode GetSaveFPRegsMode(Isolate* isolate) { // We don't need to save floating point regs when generating the snapshot - return CpuFeatures::IsSafeForSnapshot(SSE2) ? kSaveFPRegs : kDontSaveFPRegs; + return CpuFeatures::IsSafeForSnapshot(isolate, SSE2) ? kSaveFPRegs + : kDontSaveFPRegs; } @@ -108,13 +86,6 @@ void LCodeGen::FinishCode(Handle<Code> code) { if (!info()->IsStub()) { Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); } - info()->CommitDependencies(code); -} - - -void LCodeGen::Abort(BailoutReason reason) { - info()->set_bailout_reason(reason); - status_ = ABORTED; } @@ -293,7 +264,7 @@ bool LCodeGen::GeneratePrologue() { Comment(";;; Allocate local context"); // Argument to NewContext is the function, which is still in edi. 
if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); + FastNewContextStub stub(isolate(), heap_slots); __ CallStub(&stub); } else { __ push(edi); @@ -406,7 +377,7 @@ void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) { x87_stack_.LeavingBlock(current_block_, LGoto::cast(instr)); } else if (FLAG_debug_code && FLAG_enable_slow_asserts && !instr->IsGap() && !instr->IsReturn()) { - if (instr->ClobbersDoubleRegisters()) { + if (instr->ClobbersDoubleRegisters(isolate())) { if (instr->HasDoubleRegisterResult()) { ASSERT_EQ(1, x87_stack_.depth()); } else { @@ -705,7 +676,7 @@ void LCodeGen::X87PrepareBinaryOp( void LCodeGen::X87Stack::FlushIfNecessary(LInstruction* instr, LCodeGen* cgen) { - if (stack_depth_ > 0 && instr->ClobbersDoubleRegisters()) { + if (stack_depth_ > 0 && instr->ClobbersDoubleRegisters(isolate())) { bool double_inputs = instr->HasDoubleRegisterInput(); // Flush stack from tos down, since FreeX87() will mess with tos @@ -1051,6 +1022,7 @@ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, void LCodeGen::RegisterEnvironmentForDeoptimization( LEnvironment* environment, Safepoint::DeoptMode mode) { + environment->set_has_been_used(); if (!environment->HasBeenRegistered()) { // Physical stack frame layout: // -x ............. -4 0 ..................................... y @@ -1175,7 +1147,7 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { int length = deoptimizations_.length(); if (length == 0) return; Handle<DeoptimizationInputData> data = - factory()->NewDeoptimizationInputData(length, TENURED); + DeoptimizationInputData::New(isolate(), length, TENURED); Handle<ByteArray> translations = translations_.CreateByteArray(isolate()->factory()); @@ -1349,18 +1321,18 @@ void LCodeGen::DoCallStub(LCallStub* instr) { ASSERT(ToRegister(instr->result()).is(eax)); switch (instr->hydrogen()->major_key()) { case CodeStub::RegExpExec: { - RegExpExecStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + RegExpExecStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } case CodeStub::SubString: { - SubStringStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + SubStringStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } case CodeStub::StringCompare: { - StringCompareStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + StringCompareStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } default: @@ -1493,7 +1465,7 @@ void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { Register dividend = ToRegister(instr->dividend()); int32_t divisor = instr->divisor(); Register result = ToRegister(instr->result()); - ASSERT(divisor == kMinInt || (divisor != 0 && IsPowerOf2(Abs(divisor)))); + ASSERT(divisor == kMinInt || IsPowerOf2(Abs(divisor))); ASSERT(!result.is(dividend)); // Check for (0 / -x) that will produce negative zero. @@ -1556,15 +1528,15 @@ void LCodeGen::DoDivByConstI(LDivByConstI* instr) { } +// TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. 
void LCodeGen::DoDivI(LDivI* instr) { HBinaryOperation* hdiv = instr->hydrogen(); - Register dividend = ToRegister(instr->left()); - Register divisor = ToRegister(instr->right()); + Register dividend = ToRegister(instr->dividend()); + Register divisor = ToRegister(instr->divisor()); Register remainder = ToRegister(instr->temp()); - Register result = ToRegister(instr->result()); ASSERT(dividend.is(eax)); ASSERT(remainder.is(edx)); - ASSERT(result.is(eax)); + ASSERT(ToRegister(instr->result()).is(eax)); ASSERT(!divisor.is(eax)); ASSERT(!divisor.is(edx)); @@ -1598,15 +1570,7 @@ void LCodeGen::DoDivI(LDivI* instr) { __ cdq(); __ idiv(divisor); - if (hdiv->IsMathFloorOfDiv()) { - Label done; - __ test(remainder, remainder); - __ j(zero, &done, Label::kNear); - __ xor_(remainder, divisor); - __ sar(remainder, 31); - __ add(result, remainder); - __ bind(&done); - } else if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { + if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { // Deoptimize if remainder is not 0. __ test(remainder, remainder); DeoptimizeIf(not_zero, instr->environment()); @@ -1629,22 +1593,26 @@ void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { } // If the divisor is negative, we have to negate and handle edge cases. - Label not_kmin_int, done; __ neg(dividend); if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { DeoptimizeIf(zero, instr->environment()); } - if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { - // Note that we could emit branch-free code, but that would need one more - // register. - if (divisor == -1) { - DeoptimizeIf(overflow, instr->environment()); - } else { - __ j(no_overflow, &not_kmin_int, Label::kNear); - __ mov(dividend, Immediate(kMinInt / divisor)); - __ jmp(&done, Label::kNear); - } + + if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { + __ sar(dividend, shift); + return; + } + + // Dividing by -1 is basically negation, unless we overflow. + if (divisor == -1) { + DeoptimizeIf(overflow, instr->environment()); + return; } + + Label not_kmin_int, done; + __ j(no_overflow, &not_kmin_int, Label::kNear); + __ mov(dividend, Immediate(kMinInt / divisor)); + __ jmp(&done, Label::kNear); __ bind(&not_kmin_int); __ sar(dividend, shift); __ bind(&done); @@ -1696,6 +1664,59 @@ void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { } +// TODO(svenpanne) Refactor this to avoid code duplication with DoDivI. +void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) { + HBinaryOperation* hdiv = instr->hydrogen(); + Register dividend = ToRegister(instr->dividend()); + Register divisor = ToRegister(instr->divisor()); + Register remainder = ToRegister(instr->temp()); + Register result = ToRegister(instr->result()); + ASSERT(dividend.is(eax)); + ASSERT(remainder.is(edx)); + ASSERT(result.is(eax)); + ASSERT(!divisor.is(eax)); + ASSERT(!divisor.is(edx)); + + // Check for x / 0. + if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { + __ test(divisor, divisor); + DeoptimizeIf(zero, instr->environment()); + } + + // Check for (0 / -x) that will produce negative zero. + if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { + Label dividend_not_zero; + __ test(dividend, dividend); + __ j(not_zero, &dividend_not_zero, Label::kNear); + __ test(divisor, divisor); + DeoptimizeIf(sign, instr->environment()); + __ bind(&dividend_not_zero); + } + + // Check for (kMinInt / -1). 
+ if (hdiv->CheckFlag(HValue::kCanOverflow)) { + Label dividend_not_min_int; + __ cmp(dividend, kMinInt); + __ j(not_zero, &dividend_not_min_int, Label::kNear); + __ cmp(divisor, -1); + DeoptimizeIf(zero, instr->environment()); + __ bind(&dividend_not_min_int); + } + + // Sign extend to edx (= remainder). + __ cdq(); + __ idiv(divisor); + + Label done; + __ test(remainder, remainder); + __ j(zero, &done, Label::kNear); + __ xor_(remainder, divisor); + __ sar(remainder, 31); + __ add(result, remainder); + __ bind(&done); +} + + void LCodeGen::DoMulI(LMulI* instr) { Register left = ToRegister(instr->left()); LOperand* right = instr->right(); @@ -1942,7 +1963,7 @@ void LCodeGen::DoConstantD(LConstantD* instr) { int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt)); ASSERT(instr->result()->IsDoubleRegister()); - if (!CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (!CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { __ push(Immediate(upper)); __ push(Immediate(lower)); X87Register reg = ToX87Register(instr->result()); @@ -1990,9 +2011,16 @@ void LCodeGen::DoConstantE(LConstantE* instr) { void LCodeGen::DoConstantT(LConstantT* instr) { Register reg = ToRegister(instr->result()); - Handle<Object> handle = instr->value(isolate()); + Handle<Object> object = instr->value(isolate()); AllowDeferredHandleDereference smi_check; - __ LoadObject(reg, handle); + if (instr->hydrogen()->HasObjectMap()) { + Handle<Map> object_map = instr->hydrogen()->ObjectMap().handle(); + ASSERT(object->IsHeapObject()); + ASSERT(!object_map->is_stable() || + *object_map == Handle<HeapObject>::cast(object)->map()); + USE(object_map); + } + __ LoadObject(reg, object); } @@ -2215,7 +2243,7 @@ void LCodeGen::DoMathMinMax(LMathMinMax* instr) { void LCodeGen::DoArithmeticD(LArithmeticD* instr) { - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { CpuFeatureScope scope(masm(), SSE2); XMMRegister left = ToDoubleRegister(instr->left()); XMMRegister right = ToDoubleRegister(instr->right()); @@ -2307,8 +2335,8 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) { ASSERT(ToRegister(instr->right()).is(eax)); ASSERT(ToRegister(instr->result()).is(eax)); - BinaryOpICStub stub(instr->op(), NO_OVERWRITE); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -2460,7 +2488,7 @@ void LCodeGen::DoBranch(LBranch* instr) { __ cmp(FieldOperand(reg, HeapObject::kMapOffset), factory()->heap_number_map()); __ j(not_equal, &not_heap_number, Label::kNear); - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { CpuFeatureScope scope(masm(), SSE2); XMMRegister xmm_scratch = double_scratch0(); __ xorps(xmm_scratch, xmm_scratch); @@ -2547,7 +2575,7 @@ void LCodeGen::DoCompareNumericAndBranch(LCompareNumericAndBranch* instr) { EmitGoto(next_block); } else { if (instr->is_double()) { - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { CpuFeatureScope scope(masm(), SSE2); __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); } else { @@ -2925,8 +2953,8 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { void LCodeGen::DoInstanceOf(LInstanceOf* instr) { // Object and function are in fixed registers defined by the stub. 
ASSERT(ToRegister(instr->context()).is(esi)); - InstanceofStub stub(InstanceofStub::kArgsInRegisters); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); Label true_value, done; __ test(eax, Operand(eax)); @@ -3014,7 +3042,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, flags | InstanceofStub::kCallSiteInlineCheck); flags = static_cast<InstanceofStub::Flags>( flags | InstanceofStub::kReturnTrueFalseObject); - InstanceofStub stub(flags); + InstanceofStub stub(isolate(), flags); // Get the temp register reserved by the instruction. This needs to be a // register which is pushed last by PushSafepointRegisters as top of the @@ -3027,7 +3055,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; __ mov(temp, Immediate(delta)); __ StoreToSafepointRegisterSlot(temp, temp); - CallCodeGeneric(stub.GetCode(isolate()), + CallCodeGeneric(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); @@ -3223,7 +3251,7 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { offset, value, temp, - GetSaveFPRegsMode(), + GetSaveFPRegsMode(isolate()), EMIT_REMEMBERED_SET, check_needed); } @@ -4117,7 +4145,7 @@ void LCodeGen::DoPower(LPower* instr) { ASSERT(ToDoubleRegister(instr->result()).is(xmm3)); if (exponent_type.IsSmi()) { - MathPowStub stub(MathPowStub::TAGGED); + MathPowStub stub(isolate(), MathPowStub::TAGGED); __ CallStub(&stub); } else if (exponent_type.IsTagged()) { Label no_deopt; @@ -4125,14 +4153,14 @@ void LCodeGen::DoPower(LPower* instr) { __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx); DeoptimizeIf(not_equal, instr->environment()); __ bind(&no_deopt); - MathPowStub stub(MathPowStub::TAGGED); + MathPowStub stub(isolate(), MathPowStub::TAGGED); __ CallStub(&stub); } else if (exponent_type.IsInteger32()) { - MathPowStub stub(MathPowStub::INTEGER); + MathPowStub stub(isolate(), MathPowStub::INTEGER); __ CallStub(&stub); } else { ASSERT(exponent_type.IsDouble()); - MathPowStub stub(MathPowStub::DOUBLE); + MathPowStub stub(isolate(), MathPowStub::DOUBLE); __ CallStub(&stub); } } @@ -4225,8 +4253,8 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) { ASSERT(ToRegister(instr->result()).is(eax)); int arity = instr->arity(); - CallFunctionStub stub(arity, instr->hydrogen()->function_flags()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -4237,9 +4265,9 @@ void LCodeGen::DoCallNew(LCallNew* instr) { // No cell in ebx for construct type feedback in optimized code __ mov(ebx, isolate()->factory()->undefined_value()); - CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); + CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); __ Move(eax, Immediate(instr->arity())); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } @@ -4257,8 +4285,8 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { : DONT_OVERRIDE; if (instr->arity() == 0) { - ArrayNoArgumentConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArrayNoArgumentConstructorStub stub(isolate(), kind, 
override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } else if (instr->arity() == 1) { Label done; if (IsFastPackedElementsKind(kind)) { @@ -4270,18 +4298,20 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { __ j(zero, &packed_case, Label::kNear); ElementsKind holey_kind = GetHoleyElementsKind(kind); - ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArraySingleArgumentConstructorStub stub(isolate(), + holey_kind, + override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); __ jmp(&done, Label::kNear); __ bind(&packed_case); } - ArraySingleArgumentConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); __ bind(&done); } else { - ArrayNArgumentsConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } } @@ -4314,7 +4344,7 @@ void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) { void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { - Representation representation = instr->representation(); + Representation representation = instr->hydrogen()->field_representation(); HObjectAccess access = instr->hydrogen()->access(); int offset = access.offset(); @@ -4336,7 +4366,6 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { } Register object = ToRegister(instr->object()); - Handle<Map> transition = instr->transition(); SmiCheck check_needed = instr->hydrogen()->value()->IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; @@ -4356,13 +4385,13 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { __ test(value, Immediate(kSmiTagMask)); DeoptimizeIf(zero, instr->environment()); - // We know that value is a smi now, so we can omit the check below. + // We know now that value is not a smi, so we can omit the check below. 
check_needed = OMIT_SMI_CHECK; } } } else if (representation.IsDouble()) { - ASSERT(transition.is_null()); ASSERT(access.IsInobject()); + ASSERT(!instr->hydrogen()->has_transition()); ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); if (CpuFeatures::IsSupported(SSE2)) { CpuFeatureScope scope(masm(), SSE2); @@ -4375,7 +4404,9 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { return; } - if (!transition.is_null()) { + if (instr->hydrogen()->has_transition()) { + Handle<Map> transition = instr->hydrogen()->transition_map(); + AddDeprecationDependency(transition); if (!instr->hydrogen()->NeedsWriteBarrierForMap()) { __ mov(FieldOperand(object, HeapObject::kMapOffset), transition); } else { @@ -4388,7 +4419,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { HeapObject::kMapOffset, temp_map, temp, - GetSaveFPRegsMode(), + GetSaveFPRegsMode(isolate()), OMIT_REMEMBERED_SET, OMIT_SMI_CHECK); } @@ -4429,7 +4460,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { offset, value, temp, - GetSaveFPRegsMode(), + GetSaveFPRegsMode(isolate()), EMIT_REMEMBERED_SET, check_needed); } @@ -4447,34 +4478,27 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { } -void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) { - if (FLAG_debug_code && check->hydrogen()->skip_check()) { +void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { + Condition cc = instr->hydrogen()->allow_equality() ? above : above_equal; + if (instr->index()->IsConstantOperand()) { + __ cmp(ToOperand(instr->length()), + ToImmediate(LConstantOperand::cast(instr->index()), + instr->hydrogen()->length()->representation())); + cc = ReverseCondition(cc); + } else if (instr->length()->IsConstantOperand()) { + __ cmp(ToOperand(instr->index()), + ToImmediate(LConstantOperand::cast(instr->length()), + instr->hydrogen()->index()->representation())); + } else { + __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); + } + if (FLAG_debug_code && instr->hydrogen()->skip_check()) { Label done; __ j(NegateCondition(cc), &done, Label::kNear); __ int3(); __ bind(&done); } else { - DeoptimizeIf(cc, check->environment()); - } -} - - -void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { - if (instr->hydrogen()->skip_check() && !FLAG_debug_code) return; - - if (instr->index()->IsConstantOperand()) { - Immediate immediate = - ToImmediate(LConstantOperand::cast(instr->index()), - instr->hydrogen()->length()->representation()); - __ cmp(ToOperand(instr->length()), immediate); - Condition condition = - instr->hydrogen()->allow_equality() ? below : below_equal; - ApplyCheckIf(condition, instr); - } else { - __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); - Condition condition = - instr->hydrogen()->allow_equality() ? 
above : above_equal; - ApplyCheckIf(condition, instr); + DeoptimizeIf(cc, instr->environment()); } } @@ -4496,7 +4520,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { instr->additional_index())); if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS || elements_kind == FLOAT32_ELEMENTS) { - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { CpuFeatureScope scope(masm(), SSE2); XMMRegister xmm_scratch = double_scratch0(); __ cvtsd2ss(xmm_scratch, ToDoubleRegister(instr->value())); @@ -4507,7 +4531,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { } } else if (elements_kind == EXTERNAL_FLOAT64_ELEMENTS || elements_kind == FLOAT64_ELEMENTS) { - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { CpuFeatureScope scope(masm(), SSE2); __ movsd(operand, ToDoubleRegister(instr->value())); } else { @@ -4566,7 +4590,7 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) { FixedDoubleArray::kHeaderSize - kHeapObjectTag, instr->additional_index()); - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { CpuFeatureScope scope(masm(), SSE2); XMMRegister value = ToDoubleRegister(instr->value()); @@ -4668,7 +4692,7 @@ void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { __ RecordWrite(elements, key, value, - GetSaveFPRegsMode(), + GetSaveFPRegsMode(isolate()), EMIT_REMEMBERED_SET, check_needed); } @@ -4736,16 +4760,14 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { kDontSaveFPRegs); } else { ASSERT(ToRegister(instr->context()).is(esi)); + ASSERT(object_reg.is(eax)); PushSafepointRegistersScope scope(this); - if (!object_reg.is(eax)) { - __ mov(eax, object_reg); - } __ mov(ebx, to_map); bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; - TransitionElementsKindStub stub(from_kind, to_kind, is_js_array); + TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); __ CallStub(&stub); - RecordSafepointWithRegisters( - instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); + RecordSafepointWithLazyDeopt(instr, + RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); } __ bind(&not_applicable); } @@ -4866,9 +4888,10 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) { ASSERT(ToRegister(instr->context()).is(esi)); ASSERT(ToRegister(instr->left()).is(edx)); ASSERT(ToRegister(instr->right()).is(eax)); - StringAddStub stub(instr->hydrogen()->flags(), + StringAddStub stub(isolate(), + instr->hydrogen()->flags(), instr->hydrogen()->pretenure_flag()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -5406,7 +5429,7 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) { Register result_reg = ToRegister(result); if (instr->truncating()) { - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { CpuFeatureScope scope(masm(), SSE2); XMMRegister input_reg = ToDoubleRegister(input); __ TruncateDoubleToI(result_reg, input_reg); @@ -5417,7 +5440,7 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) { } } else { Label bailout, done; - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { CpuFeatureScope scope(masm(), SSE2); XMMRegister input_reg = ToDoubleRegister(input); XMMRegister xmm_scratch = double_scratch0(); @@ -5445,7 +5468,7 @@ void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { Register result_reg = 
ToRegister(result); Label bailout, done; - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { CpuFeatureScope scope(masm(), SSE2); XMMRegister input_reg = ToDoubleRegister(input); XMMRegister xmm_scratch = double_scratch0(); @@ -5578,29 +5601,35 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) { Register object_; }; - if (instr->hydrogen()->CanOmitMapChecks()) return; + if (instr->hydrogen()->IsStabilityCheck()) { + const UniqueSet<Map>* maps = instr->hydrogen()->maps(); + for (int i = 0; i < maps->size(); ++i) { + AddStabilityDependency(maps->at(i).handle()); + } + return; + } LOperand* input = instr->value(); ASSERT(input->IsRegister()); Register reg = ToRegister(input); DeferredCheckMaps* deferred = NULL; - if (instr->hydrogen()->has_migration_target()) { + if (instr->hydrogen()->HasMigrationTarget()) { deferred = new(zone()) DeferredCheckMaps(this, instr, reg, x87_stack_); __ bind(deferred->check_maps()); } - UniqueSet<Map> map_set = instr->hydrogen()->map_set(); + const UniqueSet<Map>* maps = instr->hydrogen()->maps(); Label success; - for (int i = 0; i < map_set.size() - 1; i++) { - Handle<Map> map = map_set.at(i).handle(); + for (int i = 0; i < maps->size() - 1; i++) { + Handle<Map> map = maps->at(i).handle(); __ CompareMap(reg, map); __ j(equal, &success, Label::kNear); } - Handle<Map> map = map_set.at(map_set.size() - 1).handle(); + Handle<Map> map = maps->at(maps->size() - 1).handle(); __ CompareMap(reg, map); - if (instr->hydrogen()->has_migration_target()) { + if (instr->hydrogen()->HasMigrationTarget()) { __ j(not_equal, deferred->entry()); } else { DeoptimizeIf(not_equal, instr->environment()); @@ -5908,7 +5937,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) { __ push(size); } else { int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); - __ push(Immediate(Smi::FromInt(size))); + if (size >= 0 && size <= Smi::kMaxValue) { + __ push(Immediate(Smi::FromInt(size))); + } else { + // We should never get here at runtime => abort + __ int3(); + return; + } } int flags = AllocateDoubleAlignFlag::encode( @@ -5996,10 +6031,11 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { // space for nested functions that don't need literals cloning. bool pretenure = instr->hydrogen()->pretenure(); if (!pretenure && instr->hydrogen()->has_no_literals()) { - FastNewClosureStub stub(instr->hydrogen()->strict_mode(), + FastNewClosureStub stub(isolate(), + instr->hydrogen()->strict_mode(), instr->hydrogen()->is_generator()); __ mov(ebx, Immediate(instr->hydrogen()->shared_info())); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } else { __ push(esi); __ push(Immediate(instr->hydrogen()->shared_info())); @@ -6040,13 +6076,13 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { Label::Distance false_distance = right_block == next_block ? 
Label::kNear : Label::kFar; Condition final_branch_condition = no_condition; - if (type_name->Equals(heap()->number_string())) { + if (String::Equals(type_name, factory()->number_string())) { __ JumpIfSmi(input, true_label, true_distance); __ cmp(FieldOperand(input, HeapObject::kMapOffset), factory()->heap_number_map()); final_branch_condition = equal; - } else if (type_name->Equals(heap()->string_string())) { + } else if (String::Equals(type_name, factory()->string_string())) { __ JumpIfSmi(input, false_label, false_distance); __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input); __ j(above_equal, false_label, false_distance); @@ -6054,22 +6090,23 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { 1 << Map::kIsUndetectable); final_branch_condition = zero; - } else if (type_name->Equals(heap()->symbol_string())) { + } else if (String::Equals(type_name, factory()->symbol_string())) { __ JumpIfSmi(input, false_label, false_distance); __ CmpObjectType(input, SYMBOL_TYPE, input); final_branch_condition = equal; - } else if (type_name->Equals(heap()->boolean_string())) { + } else if (String::Equals(type_name, factory()->boolean_string())) { __ cmp(input, factory()->true_value()); __ j(equal, true_label, true_distance); __ cmp(input, factory()->false_value()); final_branch_condition = equal; - } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) { + } else if (FLAG_harmony_typeof && + String::Equals(type_name, factory()->null_string())) { __ cmp(input, factory()->null_value()); final_branch_condition = equal; - } else if (type_name->Equals(heap()->undefined_string())) { + } else if (String::Equals(type_name, factory()->undefined_string())) { __ cmp(input, factory()->undefined_value()); __ j(equal, true_label, true_distance); __ JumpIfSmi(input, false_label, false_distance); @@ -6079,7 +6116,7 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { 1 << Map::kIsUndetectable); final_branch_condition = not_zero; - } else if (type_name->Equals(heap()->function_string())) { + } else if (String::Equals(type_name, factory()->function_string())) { STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); __ JumpIfSmi(input, false_label, false_distance); __ CmpObjectType(input, JS_FUNCTION_TYPE, input); @@ -6087,7 +6124,7 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE); final_branch_condition = equal; - } else if (type_name->Equals(heap()->object_string())) { + } else if (String::Equals(type_name, factory()->object_string())) { __ JumpIfSmi(input, false_label, false_distance); if (!FLAG_harmony_typeof) { __ cmp(input, factory()->null_value()); @@ -6325,11 +6362,56 @@ void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { } +void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, + Register object, + Register index) { + PushSafepointRegistersScope scope(this); + __ push(object); + __ push(index); + __ xor_(esi, esi); + __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); + RecordSafepointWithRegisters( + instr->pointer_map(), 2, Safepoint::kNoLazyDeopt); + __ StoreToSafepointRegisterSlot(object, eax); +} + + void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { + class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode { + public: + DeferredLoadMutableDouble(LCodeGen* codegen, + LLoadFieldByIndex* instr, + Register object, + Register index, + const X87Stack& x87_stack) + : LDeferredCode(codegen, x87_stack), + 
instr_(instr), + object_(object), + index_(index) { + } + virtual void Generate() V8_OVERRIDE { + codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_); + } + virtual LInstruction* instr() V8_OVERRIDE { return instr_; } + private: + LLoadFieldByIndex* instr_; + Register object_; + Register index_; + }; + Register object = ToRegister(instr->object()); Register index = ToRegister(instr->index()); + DeferredLoadMutableDouble* deferred; + deferred = new(zone()) DeferredLoadMutableDouble( + this, instr, object, index, x87_stack_); + Label out_of_object, done; + __ test(index, Immediate(Smi::FromInt(1))); + __ j(not_zero, deferred->entry()); + + __ sar(index, 1); + __ cmp(index, Immediate(0)); __ j(less, &out_of_object, Label::kNear); __ mov(object, FieldOperand(object, @@ -6346,6 +6428,7 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { index, times_half_pointer_size, FixedArray::kHeaderSize - kPointerSize)); + __ bind(deferred->exit()); __ bind(&done); } diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.h b/deps/v8/src/ia32/lithium-codegen-ia32.h index 079595cba..f4542eecd 100644 --- a/deps/v8/src/ia32/lithium-codegen-ia32.h +++ b/deps/v8/src/ia32/lithium-codegen-ia32.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IA32_LITHIUM_CODEGEN_IA32_H_ #define V8_IA32_LITHIUM_CODEGEN_IA32_H_ @@ -36,7 +13,7 @@ #include "lithium-codegen.h" #include "safepoint-table.h" #include "scopes.h" -#include "v8utils.h" +#include "utils.h" namespace v8 { namespace internal { @@ -163,6 +140,9 @@ class LCodeGen: public LCodeGenBase { void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, Label* map_check); void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); + void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, + Register object, + Register index); // Parallel move support. 
void DoParallelMove(LParallelMove* move); @@ -194,8 +174,6 @@ class LCodeGen: public LCodeGenBase { int GetStackSlotCount() const { return chunk()->spill_slot_count(); } - void Abort(BailoutReason reason); - void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } void SaveCallerDoubles(); @@ -268,7 +246,6 @@ class LCodeGen: public LCodeGenBase { LEnvironment* environment, Deoptimizer::BailoutType bailout_type); void DeoptimizeIf(Condition cc, LEnvironment* environment); - void ApplyCheckIf(Condition cc, LBoundsCheck* check); bool DeoptEveryNTimes() { return FLAG_deopt_every_n_times != 0 && !info()->IsStub(); @@ -457,6 +434,7 @@ class LCodeGen: public LCodeGenBase { } MacroAssembler* masm() const { return masm_; } + Isolate* isolate() const { return masm_->isolate(); } private: int ArrayIndex(X87Register reg); diff --git a/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc b/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc index 01821d95f..34b949085 100644 --- a/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc +++ b/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/ia32/lithium-gap-resolver-ia32.h b/deps/v8/src/ia32/lithium-gap-resolver-ia32.h index 4aff241f4..0eca35b39 100644 --- a/deps/v8/src/ia32/lithium-gap-resolver-ia32.h +++ b/deps/v8/src/ia32/lithium-gap-resolver-ia32.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IA32_LITHIUM_GAP_RESOLVER_IA32_H_ #define V8_IA32_LITHIUM_GAP_RESOLVER_IA32_H_ diff --git a/deps/v8/src/ia32/lithium-ia32.cc b/deps/v8/src/ia32/lithium-ia32.cc index 696c6be6e..3231095ad 100644 --- a/deps/v8/src/ia32/lithium-ia32.cc +++ b/deps/v8/src/ia32/lithium-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -688,6 +665,8 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, !hinstr->HasObservableSideEffects(); if (needs_environment && !instr->HasEnvironment()) { instr = AssignEnvironment(instr); + // We can't really figure out if the environment is needed or not. + instr->environment()->set_has_been_used(); } return instr; @@ -936,7 +915,8 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) { // the it was just a plain use), so it is free to move the split child into // the same register that is used for the use-at-start. // See https://code.google.com/p/chromium/issues/detail?id=201590 - if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) { + if (!(instr->ClobbersRegisters() && + instr->ClobbersDoubleRegisters(isolate()))) { int fixed = 0; int used_at_start = 0; for (UseIterator it(instr); !it.Done(); it.Advance()) { @@ -960,13 +940,13 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) { if (FLAG_stress_environments && !instr->HasEnvironment()) { instr = AssignEnvironment(instr); } - if (!CpuFeatures::IsSafeForSnapshot(SSE2) && instr->IsGoto() && + if (!CpuFeatures::IsSafeForSnapshot(isolate(), SSE2) && instr->IsGoto() && LGoto::cast(instr)->jumps_to_join()) { // TODO(olivf) Since phis of spilled values are joined as registers // (not in the stack slot), we need to allow the goto gaps to keep one // x87 register alive. To ensure all other values are still spilled, we // insert a fpu register barrier right before. - LClobberDoubles* clobber = new(zone()) LClobberDoubles(); + LClobberDoubles* clobber = new(zone()) LClobberDoubles(isolate()); clobber->set_hydrogen_value(current); chunk_->AddInstruction(clobber, current_block_); } @@ -1365,7 +1345,7 @@ LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) { } -LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) { +LInstruction* LChunkBuilder::DoDivI(HDiv* instr) { ASSERT(instr->representation().IsSmiOrInteger32()); ASSERT(instr->left()->representation().Equals(instr->representation())); ASSERT(instr->right()->representation().Equals(instr->representation())); @@ -1377,8 +1357,7 @@ LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) { if (instr->CheckFlag(HValue::kCanBeDivByZero) || instr->CheckFlag(HValue::kBailoutOnMinusZero) || instr->CheckFlag(HValue::kCanOverflow) || - (!instr->IsMathFloorOfDiv() && - !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) { + !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { result = AssignEnvironment(result); } return result; @@ -1442,13 +1421,31 @@ LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) { } +LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) { + ASSERT(instr->representation().IsSmiOrInteger32()); + ASSERT(instr->left()->representation().Equals(instr->representation())); + ASSERT(instr->right()->representation().Equals(instr->representation())); + LOperand* dividend = UseFixed(instr->left(), eax); + LOperand* divisor = UseRegister(instr->right()); + LOperand* temp = FixedTemp(edx); + LInstruction* result = DefineFixed(new(zone()) LFlooringDivI( + dividend, divisor, temp), eax); + if (instr->CheckFlag(HValue::kCanBeDivByZero) || + instr->CheckFlag(HValue::kBailoutOnMinusZero) || + instr->CheckFlag(HValue::kCanOverflow)) { + result = AssignEnvironment(result); + } + return result; +} + + LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) { if (instr->RightIsPowerOf2()) { return DoFlooringDivByPowerOf2I(instr); } else if 
(instr->right()->IsConstant()) { return DoFlooringDivByConstI(instr); } else { - return DoDivI(instr); + return DoFlooringDivI(instr); } } @@ -1658,6 +1655,8 @@ LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) { LInstruction* LChunkBuilder::DoCompareNumericAndBranch( HCompareNumericAndBranch* instr) { + LInstruction* goto_instr = CheckElideControlInstruction(instr); + if (goto_instr != NULL) return goto_instr; Representation r = instr->representation(); if (r.IsSmiOrInteger32()) { ASSERT(instr->left()->representation().Equals(r)); @@ -1846,9 +1845,16 @@ LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) { LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) { - return AssignEnvironment(new(zone()) LBoundsCheck( - UseRegisterOrConstantAtStart(instr->index()), - UseAtStart(instr->length()))); + if (!FLAG_debug_code && instr->skip_check()) return NULL; + LOperand* index = UseRegisterOrConstantAtStart(instr->index()); + LOperand* length = !index->IsConstantOperand() + ? UseOrConstantAtStart(instr->length()) + : UseAtStart(instr->length()); + LInstruction* result = new(zone()) LBoundsCheck(index, length); + if (!FLAG_debug_code || !instr->skip_check()) { + result = AssignEnvironment(result); + } + return result; } @@ -1882,29 +1888,23 @@ LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) { LInstruction* LChunkBuilder::DoChange(HChange* instr) { Representation from = instr->from(); Representation to = instr->to(); + HValue* val = instr->value(); if (from.IsSmi()) { if (to.IsTagged()) { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); return DefineSameAsFirst(new(zone()) LDummyUse(value)); } from = Representation::Tagged(); } - // Only mark conversions that might need to allocate as calling rather than - // all changes. This makes simple, non-allocating conversion not have to force - // building a stack frame. if (from.IsTagged()) { if (to.IsDouble()) { - LOperand* value = UseRegister(instr->value()); - // Temp register only necessary for minus zero check. + LOperand* value = UseRegister(val); LOperand* temp = TempRegister(); - LInstruction* result = DefineAsRegister( - new(zone()) LNumberUntagD(value, temp)); - if (!instr->value()->representation().IsSmi()) { - result = AssignEnvironment(result); - } + LInstruction* result = + DefineAsRegister(new(zone()) LNumberUntagD(value, temp)); + if (!val->representation().IsSmi()) result = AssignEnvironment(result); return result; } else if (to.IsSmi()) { - HValue* val = instr->value(); LOperand* value = UseRegister(val); if (val->type().IsSmi()) { return DefineSameAsFirst(new(zone()) LDummyUse(value)); @@ -1912,44 +1912,39 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value))); } else { ASSERT(to.IsInteger32()); - HValue* val = instr->value(); if (val->type().IsSmi() || val->representation().IsSmi()) { LOperand* value = UseRegister(val); return DefineSameAsFirst(new(zone()) LSmiUntag(value, false)); } else { + LOperand* value = UseRegister(val); bool truncating = instr->CanTruncateToInt32(); LOperand* xmm_temp = - (CpuFeatures::IsSafeForSnapshot(SSE2) && !truncating) + (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2) && !truncating) ? FixedTemp(xmm1) : NULL; - LInstruction* result = DefineSameAsFirst( - new(zone()) LTaggedToI(UseRegister(val), xmm_temp)); - if (!instr->value()->representation().IsSmi()) { - // Note: Only deopts in deferred code. 
- result = AssignEnvironment(result); - } + LInstruction* result = + DefineSameAsFirst(new(zone()) LTaggedToI(value, xmm_temp)); + if (!val->representation().IsSmi()) result = AssignEnvironment(result); return result; } } } else if (from.IsDouble()) { if (to.IsTagged()) { info()->MarkAsDeferredCalling(); - LOperand* value = UseRegisterAtStart(instr->value()); + LOperand* value = UseRegisterAtStart(val); LOperand* temp = FLAG_inline_new ? TempRegister() : NULL; - - // Make sure that temp and result_temp are different registers. LUnallocated* result_temp = TempRegister(); LNumberTagD* result = new(zone()) LNumberTagD(value, temp); return AssignPointerMap(Define(result, result_temp)); } else if (to.IsSmi()) { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); return AssignEnvironment( DefineAsRegister(new(zone()) LDoubleToSmi(value))); } else { ASSERT(to.IsInteger32()); bool truncating = instr->CanTruncateToInt32(); - bool needs_temp = CpuFeatures::IsSafeForSnapshot(SSE2) && !truncating; - LOperand* value = needs_temp ? - UseTempRegister(instr->value()) : UseRegister(instr->value()); + bool needs_temp = + CpuFeatures::IsSafeForSnapshot(isolate(), SSE2) && !truncating; + LOperand* value = needs_temp ? UseTempRegister(val) : UseRegister(val); LOperand* temp = needs_temp ? TempRegister() : NULL; LInstruction* result = DefineAsRegister(new(zone()) LDoubleToI(value, temp)); @@ -1959,23 +1954,23 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { } else if (from.IsInteger32()) { info()->MarkAsDeferredCalling(); if (to.IsTagged()) { - HValue* val = instr->value(); - LOperand* value = UseRegister(val); if (!instr->CheckFlag(HValue::kCanOverflow)) { + LOperand* value = UseRegister(val); return DefineSameAsFirst(new(zone()) LSmiTag(value)); } else if (val->CheckFlag(HInstruction::kUint32)) { + LOperand* value = UseRegister(val); LOperand* temp1 = TempRegister(); - LOperand* temp2 = CpuFeatures::IsSupported(SSE2) ? FixedTemp(xmm1) - : NULL; + LOperand* temp2 = + CpuFeatures::IsSupported(SSE2) ? 
FixedTemp(xmm1) : NULL; LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2); return AssignPointerMap(DefineSameAsFirst(result)); } else { + LOperand* value = UseRegister(val); LOperand* temp = TempRegister(); LNumberTagI* result = new(zone()) LNumberTagI(value, temp); return AssignPointerMap(DefineSameAsFirst(result)); } } else if (to.IsSmi()) { - HValue* val = instr->value(); LOperand* value = UseRegister(val); LInstruction* result = DefineSameAsFirst(new(zone()) LSmiTag(value)); if (instr->CheckFlag(HValue::kCanOverflow)) { @@ -1984,13 +1979,12 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { return result; } else { ASSERT(to.IsDouble()); - if (instr->value()->CheckFlag(HInstruction::kUint32)) { + if (val->CheckFlag(HInstruction::kUint32)) { LOperand* temp = FixedTemp(xmm1); return DefineAsRegister( - new(zone()) LUint32ToDouble(UseRegister(instr->value()), temp)); + new(zone()) LUint32ToDouble(UseRegister(val), temp)); } else { - return DefineAsRegister( - new(zone()) LInteger32ToDouble(Use(instr->value()))); + return DefineAsRegister(new(zone()) LInteger32ToDouble(Use(val))); } } } @@ -2001,7 +1995,9 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) { LOperand* value = UseAtStart(instr->value()); - return AssignEnvironment(new(zone()) LCheckNonSmi(value)); + LInstruction* result = new(zone()) LCheckNonSmi(value); + if (!instr->value()->IsHeapObject()) result = AssignEnvironment(result); + return result; } @@ -2031,16 +2027,12 @@ LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) { LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) { - LOperand* value = NULL; - if (!instr->CanOmitMapChecks()) { - value = UseRegisterAtStart(instr->value()); - if (instr->has_migration_target()) info()->MarkAsDeferredCalling(); - } - LCheckMaps* result = new(zone()) LCheckMaps(value); - if (!instr->CanOmitMapChecks()) { - // Note: Only deopts in deferred code. 
- AssignEnvironment(result); - if (instr->has_migration_target()) return AssignPointerMap(result); + if (instr->IsStabilityCheck()) return new(zone()) LCheckMaps; + LOperand* value = UseRegisterAtStart(instr->value()); + LInstruction* result = AssignEnvironment(new(zone()) LCheckMaps(value)); + if (instr->HasMigrationTarget()) { + info()->MarkAsDeferredCalling(); + result = AssignPointerMap(result); } return result; } @@ -2266,7 +2258,7 @@ LOperand* LChunkBuilder::GetStoreKeyedValueOperand(HStoreKeyed* instr) { return UseFixed(instr->value(), eax); } - if (!CpuFeatures::IsSafeForSnapshot(SSE2) && + if (!CpuFeatures::IsSafeForSnapshot(isolate(), SSE2) && IsDoubleOrFloatElementsKind(elements_kind)) { return UseRegisterAtStart(instr->value()); } @@ -2345,7 +2337,6 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) { LInstruction* LChunkBuilder::DoTransitionElementsKind( HTransitionElementsKind* instr) { - LOperand* object = UseRegister(instr->object()); if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) { LOperand* object = UseRegister(instr->object()); LOperand* new_map_reg = TempRegister(); @@ -2355,10 +2346,11 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind( new_map_reg, temp_reg); return result; } else { + LOperand* object = UseFixed(instr->object(), eax); LOperand* context = UseFixed(instr->context(), esi); LTransitionElementsKind* result = new(zone()) LTransitionElementsKind(object, context, NULL, NULL); - return AssignPointerMap(result); + return MarkAsCall(result, instr); } } @@ -2521,7 +2513,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) { } else { ASSERT(info()->IsStub()); CodeStubInterfaceDescriptor* descriptor = - info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); + info()->code_stub()->GetInterfaceDescriptor(); int index = static_cast<int>(instr->index()); Register reg = descriptor->GetParameterRegister(index); return DefineFixed(result, reg); @@ -2642,6 +2634,7 @@ LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { HEnvironment* outer = current_block_->last_environment(); + outer->set_ast_id(instr->ReturnId()); HConstant* undefined = graph()->GetConstantUndefined(); HEnvironment* inner = outer->CopyForInlining(instr->closure(), instr->arguments_count(), @@ -2702,7 +2695,9 @@ LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) { LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { LOperand* object = UseRegister(instr->object()); LOperand* index = UseTempRegister(instr->index()); - return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); + LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index); + LInstruction* result = DefineSameAsFirst(load); + return AssignPointerMap(result); } diff --git a/deps/v8/src/ia32/lithium-ia32.h b/deps/v8/src/ia32/lithium-ia32.h index 7964b7f6e..fe6f79463 100644 --- a/deps/v8/src/ia32/lithium-ia32.h +++ b/deps/v8/src/ia32/lithium-ia32.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IA32_LITHIUM_IA32_H_ #define V8_IA32_LITHIUM_IA32_H_ @@ -99,6 +76,7 @@ class LCodeGen; V(DummyUse) \ V(FlooringDivByConstI) \ V(FlooringDivByPowerOf2I) \ + V(FlooringDivI) \ V(ForInCacheArray) \ V(ForInPrepareMap) \ V(FunctionLiteral) \ @@ -260,11 +238,11 @@ class LInstruction : public ZoneObject { // Interface to the register allocator and iterators. bool ClobbersTemps() const { return IsCall(); } bool ClobbersRegisters() const { return IsCall(); } - virtual bool ClobbersDoubleRegisters() const { + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const { return IsCall() || // We only have rudimentary X87Stack tracking, thus in general // cannot handle phi-nodes. 
- (!CpuFeatures::IsSafeForSnapshot(SSE2) && IsControl()); + (!CpuFeatures::IsSafeForSnapshot(isolate, SSE2) && IsControl()); } virtual bool HasResult() const = 0; @@ -399,9 +377,13 @@ class LInstructionGap V8_FINAL : public LGap { class LClobberDoubles V8_FINAL : public LTemplateInstruction<0, 0, 0> { public: - LClobberDoubles() { ASSERT(!CpuFeatures::IsSafeForSnapshot(SSE2)); } + explicit LClobberDoubles(Isolate* isolate) { + ASSERT(!CpuFeatures::IsSafeForSnapshot(isolate, SSE2)); + } - virtual bool ClobbersDoubleRegisters() const { return true; } + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const V8_OVERRIDE { + return true; + } DECLARE_CONCRETE_INSTRUCTION(ClobberDoubles, "clobber-d") }; @@ -417,7 +399,9 @@ class LGoto V8_FINAL : public LTemplateInstruction<0, 0, 0> { virtual bool IsControl() const V8_OVERRIDE { return true; } int block_id() const { return block_->block_id(); } - virtual bool ClobbersDoubleRegisters() const { return false; } + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const V8_OVERRIDE { + return false; + } bool jumps_to_join() const { return block_->predecessors()->length() > 1; } @@ -744,14 +728,14 @@ class LDivByConstI V8_FINAL : public LTemplateInstruction<1, 1, 2> { class LDivI V8_FINAL : public LTemplateInstruction<1, 2, 1> { public: - LDivI(LOperand* left, LOperand* right, LOperand* temp) { - inputs_[0] = left; - inputs_[1] = right; + LDivI(LOperand* dividend, LOperand* divisor, LOperand* temp) { + inputs_[0] = dividend; + inputs_[1] = divisor; temps_[0] = temp; } - LOperand* left() { return inputs_[0]; } - LOperand* right() { return inputs_[1]; } + LOperand* dividend() { return inputs_[0]; } + LOperand* divisor() { return inputs_[1]; } LOperand* temp() { return temps_[0]; } DECLARE_CONCRETE_INSTRUCTION(DivI, "div-i") @@ -806,6 +790,23 @@ class LFlooringDivByConstI V8_FINAL : public LTemplateInstruction<1, 1, 3> { }; +class LFlooringDivI V8_FINAL : public LTemplateInstruction<1, 2, 1> { + public: + LFlooringDivI(LOperand* dividend, LOperand* divisor, LOperand* temp) { + inputs_[0] = dividend; + inputs_[1] = divisor; + temps_[0] = temp; + } + + LOperand* dividend() { return inputs_[0]; } + LOperand* divisor() { return inputs_[1]; } + LOperand* temp() { return temps_[0]; } + + DECLARE_CONCRETE_INSTRUCTION(FlooringDivI, "flooring-div-i") + DECLARE_HYDROGEN_ACCESSOR(MathFloorOfDiv) +}; + + class LMulI V8_FINAL : public LTemplateInstruction<1, 2, 1> { public: LMulI(LOperand* left, LOperand* right, LOperand* temp) { @@ -1993,7 +1994,7 @@ class LCallRuntime V8_FINAL : public LTemplateInstruction<1, 1, 0> { DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime") DECLARE_HYDROGEN_ACCESSOR(CallRuntime) - virtual bool ClobbersDoubleRegisters() const V8_OVERRIDE { + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const V8_OVERRIDE { return save_doubles() == kDontSaveFPRegs; } @@ -2190,11 +2191,6 @@ class LStoreNamedField V8_FINAL : public LTemplateInstruction<0, 2, 2> { DECLARE_HYDROGEN_ACCESSOR(StoreNamedField) virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE; - - Handle<Map> transition() const { return hydrogen()->transition_map(); } - Representation representation() const { - return hydrogen()->field_representation(); - } }; @@ -2403,7 +2399,7 @@ class LCheckInstanceType V8_FINAL : public LTemplateInstruction<0, 1, 1> { class LCheckMaps V8_FINAL : public LTemplateInstruction<0, 1, 0> { public: - explicit LCheckMaps(LOperand* value) { + explicit LCheckMaps(LOperand* value = NULL) { inputs_[0] = value; } @@ -2730,6 +2726,8 @@ 
class LChunkBuilder V8_FINAL : public LChunkBuilderBase { next_block_(NULL), allocator_(allocator) { } + Isolate* isolate() const { return graph_->isolate(); } + // Build the sequence for the graph. LPlatformChunk* Build(); @@ -2750,12 +2748,13 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase { LInstruction* DoMathClz32(HUnaryMathOperation* instr); LInstruction* DoDivByPowerOf2I(HDiv* instr); LInstruction* DoDivByConstI(HDiv* instr); - LInstruction* DoDivI(HBinaryOperation* instr); + LInstruction* DoDivI(HDiv* instr); LInstruction* DoModByPowerOf2I(HMod* instr); LInstruction* DoModByConstI(HMod* instr); LInstruction* DoModI(HMod* instr); LInstruction* DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr); LInstruction* DoFlooringDivByConstI(HMathFloorOfDiv* instr); + LInstruction* DoFlooringDivI(HMathFloorOfDiv* instr); private: enum Status { diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc index 7847b3b39..f27927de9 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.cc +++ b/deps/v8/src/ia32/macro-assembler-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -198,7 +175,7 @@ void MacroAssembler::RememberedSetHelper( j(equal, &done, Label::kNear); } StoreBufferOverflowStub store_buffer_overflow = - StoreBufferOverflowStub(save_fp); + StoreBufferOverflowStub(isolate(), save_fp); CallStub(&store_buffer_overflow); if (and_then == kReturnAtEnd) { ret(0); @@ -247,8 +224,8 @@ void MacroAssembler::ClampUint8(Register reg) { void MacroAssembler::SlowTruncateToI(Register result_reg, Register input_reg, int offset) { - DoubleToIStub stub(input_reg, result_reg, offset, true); - call(stub.GetCode(isolate()), RelocInfo::CODE_TARGET); + DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true); + call(stub.GetCode(), RelocInfo::CODE_TARGET); } @@ -414,7 +391,7 @@ void MacroAssembler::TaggedToI(Register result_reg, isolate()->factory()->heap_number_map()); j(not_equal, lost_precision, Label::kNear); - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + if (CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)) { ASSERT(!temp.is(no_xmm_reg)); CpuFeatureScope scope(this, SSE2); @@ -634,7 +611,8 @@ void MacroAssembler::RecordWriteForMap( // them. lea(address, FieldOperand(object, HeapObject::kMapOffset)); mov(value, Immediate(map)); - RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp); + RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET, + save_fp); CallStub(&stub); bind(&done); @@ -699,7 +677,8 @@ void MacroAssembler::RecordWrite(Register object, &done, Label::kNear); - RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode); + RecordWriteStub stub(isolate(), object, value, address, remembered_set_action, + fp_mode); CallStub(&stub); bind(&done); @@ -713,14 +692,12 @@ void MacroAssembler::RecordWrite(Register object, } -#ifdef ENABLE_DEBUGGER_SUPPORT void MacroAssembler::DebugBreak() { Move(eax, Immediate(0)); mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate()))); - CEntryStub ces(1); - call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK); + CEntryStub ces(isolate(), 1); + call(ces.GetCode(), RelocInfo::DEBUG_BREAK); } -#endif void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) { @@ -1424,7 +1401,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, // Note: r0 will contain hash code void MacroAssembler::GetNumberHash(Register r0, Register scratch) { // Xor original key with a seed. - if (Serializer::enabled()) { + if (Serializer::enabled(isolate())) { ExternalReference roots_array_start = ExternalReference::roots_array_start(isolate()); mov(scratch, Immediate(Heap::kHashSeedRootIndex)); @@ -2182,12 +2159,12 @@ void MacroAssembler::TryGetFunctionPrototype(Register function, void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) { ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs. 
- call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id); + call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); } void MacroAssembler::TailCallStub(CodeStub* stub) { - jmp(stub->GetCode(isolate()), RelocInfo::CODE_TARGET); + jmp(stub->GetCode(), RelocInfo::CODE_TARGET); } @@ -2202,14 +2179,6 @@ bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { } -void MacroAssembler::IllegalOperation(int num_arguments) { - if (num_arguments > 0) { - add(esp, Immediate(num_arguments * kPointerSize)); - } - mov(eax, Immediate(isolate()->factory()->undefined_value())); -} - - void MacroAssembler::IndexFromHash(Register hash, Register index) { // The assert checks that the constants for the maximum number of digits // for an array index cached in the hash field and the number of bits @@ -2235,10 +2204,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // If the expected number of arguments of the runtime function is // constant, we check that the actual number of arguments match the // expectation. - if (f->nargs >= 0 && f->nargs != num_arguments) { - IllegalOperation(num_arguments); - return; - } + CHECK(f->nargs < 0 || f->nargs == num_arguments); // TODO(1236192): Most runtime routines don't need the number of // arguments passed in because it is constant. At some point we @@ -2246,8 +2212,10 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // smarter. Move(eax, Immediate(num_arguments)); mov(ebx, Immediate(ExternalReference(f, isolate()))); - CEntryStub ces(1, CpuFeatures::IsSupported(SSE2) ? save_doubles - : kDontSaveFPRegs); + CEntryStub ces(isolate(), + 1, + CpuFeatures::IsSupported(SSE2) ? save_doubles + : kDontSaveFPRegs); CallStub(&ces); } @@ -2257,7 +2225,7 @@ void MacroAssembler::CallExternalReference(ExternalReference ref, mov(eax, Immediate(num_arguments)); mov(ebx, Immediate(ref)); - CEntryStub stub(1); + CEntryStub stub(isolate(), 1); CallStub(&stub); } @@ -2298,7 +2266,7 @@ void MacroAssembler::PrepareCallApiFunction(int argc) { void MacroAssembler::CallApiFunctionAndReturn( Register function_address, - Address thunk_address, + ExternalReference thunk_ref, Operand thunk_last_arg, int stack_space, Operand return_value_operand, @@ -2329,17 +2297,15 @@ void MacroAssembler::CallApiFunctionAndReturn( Label profiler_disabled; Label end_profiler_check; - bool* is_profiling_flag = - isolate()->cpu_profiler()->is_profiling_address(); - STATIC_ASSERT(sizeof(*is_profiling_flag) == 1); - mov(eax, Immediate(reinterpret_cast<Address>(is_profiling_flag))); + mov(eax, Immediate(ExternalReference::is_profiling_address(isolate()))); cmpb(Operand(eax, 0), 0); j(zero, &profiler_disabled); // Additional parameter is the address of the actual getter function. mov(thunk_last_arg, function_address); // Call the api function. - call(thunk_address, RelocInfo::RUNTIME_ENTRY); + mov(eax, Immediate(thunk_ref)); + call(eax); jmp(&end_profiler_check); bind(&profiler_disabled); @@ -2451,8 +2417,8 @@ void MacroAssembler::CallApiFunctionAndReturn( void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) { // Set the entry point and jump to the C entry runtime stub. 
mov(ebx, Immediate(ext)); - CEntryStub ces(1); - jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET); + CEntryStub ces(isolate(), 1); + jmp(ces.GetCode(), RelocInfo::CODE_TARGET); } diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h index 698c81fe8..f8c240132 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.h +++ b/deps/v8/src/ia32/macro-assembler-ia32.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IA32_MACRO_ASSEMBLER_IA32_H_ #define V8_IA32_MACRO_ASSEMBLER_IA32_H_ @@ -221,12 +198,10 @@ class MacroAssembler: public Assembler { Register scratch2, SaveFPRegsMode save_fp); -#ifdef ENABLE_DEBUGGER_SUPPORT // --------------------------------------------------------------------------- // Debugger Support void DebugBreak(); -#endif // Generates function and stub prologue code. void Prologue(PrologueFrameMode frame_mode); @@ -727,10 +702,6 @@ class MacroAssembler: public Assembler { Label* miss, bool miss_on_bound_function = false); - // Generates code for reporting that an illegal operation has - // occurred. - void IllegalOperation(int num_arguments); - // Picks out an array index from the hash field. // Register use: // hash - holds the index's hash. Clobbered. @@ -809,7 +780,7 @@ class MacroAssembler: public Assembler { // caller-save registers. Restores context. On return removes // stack_space * kPointerSize (GCed). void CallApiFunctionAndReturn(Register function_address, - Address thunk_address, + ExternalReference thunk_ref, Operand thunk_last_arg, int stack_space, Operand return_value_operand, @@ -1012,13 +983,6 @@ class MacroAssembler: public Assembler { Register scratch, AllocationFlags flags); - // Helper for PopHandleScope. Allowed to perform a GC and returns - // NULL if gc_allowed. Does not perform a GC if !gc_allowed, and - // possibly returns a failure object indicating an allocation failure. 
- MUST_USE_RESULT MaybeObject* PopHandleScopeHelper(Register saved, - Register scratch, - bool gc_allowed); - // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace. void InNewSpace(Register object, Register scratch, diff --git a/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc b/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc index 255df3285..22c620e7c 100644 --- a/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc +++ b/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -1126,7 +1103,7 @@ int RegExpMacroAssemblerIA32::CheckStackGuardState(Address* return_address, ASSERT(*return_address <= re_code->instruction_start() + re_code->instruction_size()); - MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate); + Object* result = Execution::HandleStackGuardInterrupt(isolate); if (*code_handle != re_code) { // Return address no longer valid int delta = code_handle->address() - re_code->address(); diff --git a/deps/v8/src/ia32/regexp-macro-assembler-ia32.h b/deps/v8/src/ia32/regexp-macro-assembler-ia32.h index 393333600..ab5b75b09 100644 --- a/deps/v8/src/ia32/regexp-macro-assembler-ia32.h +++ b/deps/v8/src/ia32/regexp-macro-assembler-ia32.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IA32_REGEXP_MACRO_ASSEMBLER_IA32_H_ #define V8_IA32_REGEXP_MACRO_ASSEMBLER_IA32_H_ diff --git a/deps/v8/src/ia32/simulator-ia32.cc b/deps/v8/src/ia32/simulator-ia32.cc index b6f284733..20edae83a 100644 --- a/deps/v8/src/ia32/simulator-ia32.cc +++ b/deps/v8/src/ia32/simulator-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Since there is no simulator for the ia32 architecture this file is empty. diff --git a/deps/v8/src/ia32/simulator-ia32.h b/deps/v8/src/ia32/simulator-ia32.h index 478d4ce5c..10356284e 100644 --- a/deps/v8/src/ia32/simulator-ia32.h +++ b/deps/v8/src/ia32/simulator-ia32.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IA32_SIMULATOR_IA32_H_ #define V8_IA32_SIMULATOR_IA32_H_ diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc index 1a745c7b7..adc8cd59a 100644 --- a/deps/v8/src/ia32/stub-cache-ia32.cc +++ b/deps/v8/src/ia32/stub-cache-ia32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -446,7 +423,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm, __ mov(api_function_address, Immediate(function_address)); // Jump to stub. - CallApiFunctionStub stub(is_store, call_data_undefined, argc); + CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc); __ TailCallStub(&stub); } @@ -473,7 +450,7 @@ void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm, JSGlobalObject::EnsurePropertyCell(global, name); ASSERT(cell->value()->IsTheHole()); Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value(); - if (Serializer::enabled()) { + if (Serializer::enabled(masm->isolate())) { __ mov(scratch, Immediate(cell)); __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset), Immediate(the_hole)); @@ -529,6 +506,21 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ JumpIfNotSmi(value_reg, miss_label); } else if (representation.IsHeapObject()) { __ JumpIfSmi(value_reg, miss_label); + HeapType* field_type = descriptors->GetFieldType(descriptor); + HeapType::Iterator<Map> it = field_type->Classes(); + if (!it.Done()) { + Label do_store; + while (true) { + __ CompareMap(value_reg, it.Current()); + it.Advance(); + if (it.Done()) { + __ j(not_equal, miss_label); + break; + } + __ j(equal, &do_store, Label::kNear); + } + __ bind(&do_store); + } } else if (representation.IsDouble()) { Label do_store, heap_number; __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow); @@ -699,6 +691,21 @@ void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm, __ JumpIfNotSmi(value_reg, miss_label); } else if (representation.IsHeapObject()) { __ JumpIfSmi(value_reg, miss_label); + HeapType* field_type = lookup->GetFieldType(); + HeapType::Iterator<Map> it = field_type->Classes(); + if (!it.Done()) { + Label do_store; + while (true) { + __ CompareMap(value_reg, it.Current()); + it.Advance(); + if (it.Done()) { + __ j(not_equal, miss_label); + break; + } + __ j(equal, &do_store, Label::kNear); + } + __ bind(&do_store); + } } else if (representation.IsDouble()) { // Load the double storage. 
if (index < 0) { @@ -824,7 +831,8 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type, int depth = 0; Handle<JSObject> current = Handle<JSObject>::null(); - if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant()); + if (type->IsConstant()) current = + Handle<JSObject>::cast(type->AsConstant()->Value()); Handle<JSObject> prototype = Handle<JSObject>::null(); Handle<Map> current_map = receiver_map; Handle<Map> holder_map(holder->map()); @@ -847,7 +855,7 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type, name = factory()->InternalizeString(Handle<String>::cast(name)); } ASSERT(current.is_null() || - current->property_dictionary()->FindEntry(*name) == + current->property_dictionary()->FindEntry(name) == NameDictionary::kNotFound); GenerateDictionaryNegativeLookup(masm(), miss, reg, name, @@ -1002,15 +1010,17 @@ void LoadStubCompiler::GenerateLoadField(Register reg, Representation representation) { if (!reg.is(receiver())) __ mov(receiver(), reg); if (kind() == Code::LOAD_IC) { - LoadFieldStub stub(field.is_inobject(holder), + LoadFieldStub stub(isolate(), + field.is_inobject(holder), field.translate(holder), representation); - GenerateTailCall(masm(), stub.GetCode(isolate())); + GenerateTailCall(masm(), stub.GetCode()); } else { - KeyedLoadFieldStub stub(field.is_inobject(holder), + KeyedLoadFieldStub stub(isolate(), + field.is_inobject(holder), field.translate(holder), representation); - GenerateTailCall(masm(), stub.GetCode(isolate())); + GenerateTailCall(masm(), stub.GetCode()); } } @@ -1056,7 +1066,7 @@ void LoadStubCompiler::GenerateLoadCallback( Address function_address = v8::ToCData<Address>(callback->getter()); __ mov(getter_address, Immediate(function_address)); - CallApiGetterStub stub; + CallApiGetterStub stub(isolate()); __ TailCallStub(&stub); } @@ -1167,17 +1177,6 @@ void LoadStubCompiler::GenerateLoadInterceptor( } -void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) { - Label success; - // Check that the object is a boolean. - __ cmp(object, factory()->true_value()); - __ j(equal, &success); - __ cmp(object, factory()->false_value()); - __ j(not_equal, miss); - __ bind(&success); -} - - Handle<Code> StoreStubCompiler::CompileStoreCallback( Handle<JSObject> object, Handle<JSObject> holder, @@ -1414,7 +1413,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal( HandlerFrontendHeader(type, receiver(), global, name, &miss); // Get the value from the cell. - if (Serializer::enabled()) { + if (Serializer::enabled(isolate())) { __ mov(eax, Immediate(cell)); __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset)); } else { diff --git a/deps/v8/src/ic-inl.h b/deps/v8/src/ic-inl.h index ebe0fb9b3..010df08c6 100644 --- a/deps/v8/src/ic-inl.h +++ b/deps/v8/src/ic-inl.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IC_INL_H_ #define V8_IC_INL_H_ @@ -42,7 +19,6 @@ Address IC::address() const { // Get the address of the call. Address result = Assembler::target_address_from_return_address(pc()); -#ifdef ENABLE_DEBUGGER_SUPPORT Debug* debug = isolate()->debug(); // First check if any break points are active if not just return the address // of the call. @@ -68,9 +44,6 @@ Address IC::address() const { // No break point here just return the address of the call. return result; } -#else - return result; -#endif } @@ -79,7 +52,6 @@ ConstantPoolArray* IC::constant_pool() const { return NULL; } else { Handle<ConstantPoolArray> result = raw_constant_pool_; -#ifdef ENABLE_DEBUGGER_SUPPORT Debug* debug = isolate()->debug(); // First check if any break points are active if not just return the // original constant pool. @@ -94,7 +66,6 @@ ConstantPoolArray* IC::constant_pool() const { // constant pool for the original code instead of the breakpointed code. return GetOriginalCode()->constant_pool(); } -#endif return *result; } } diff --git a/deps/v8/src/ic.cc b/deps/v8/src/ic.cc index a32717362..3897f8845 100644 --- a/deps/v8/src/ic.cc +++ b/deps/v8/src/ic.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -31,11 +8,11 @@ #include "api.h" #include "arguments.h" #include "codegen.h" +#include "conversions.h" #include "execution.h" #include "ic-inl.h" #include "runtime.h" #include "stub-cache.h" -#include "v8conversions.h" namespace v8 { namespace internal { @@ -91,9 +68,11 @@ void IC::TraceIC(const char* type, } JavaScriptFrame::PrintTop(isolate(), stdout, false, true); ExtraICState extra_state = new_target->extra_ic_state(); - const char* modifier = - GetTransitionMarkModifier( - KeyedStoreIC::GetKeyedAccessStoreMode(extra_state)); + const char* modifier = ""; + if (new_target->kind() == Code::KEYED_STORE_IC) { + modifier = GetTransitionMarkModifier( + KeyedStoreIC::GetKeyedAccessStoreMode(extra_state)); + } PrintF(" (%c->%c%s)", TransitionMarkFromState(state()), TransitionMarkFromState(new_state), @@ -121,7 +100,8 @@ void IC::TraceIC(const char* type, IC::IC(FrameDepth depth, Isolate* isolate) : isolate_(isolate), - target_set_(false) { + target_set_(false), + target_maps_set_(false) { // To improve the performance of the (much used) IC code, we unfold a few // levels of the stack frame iteration code. This yields a ~35% speedup when // running DeltaBlue and a ~25% speedup of gbemu with the '--nouse-ic' flag. @@ -166,7 +146,6 @@ IC::IC(FrameDepth depth, Isolate* isolate) } -#ifdef ENABLE_DEBUGGER_SUPPORT SharedFunctionInfo* IC::GetSharedFunctionInfo() const { // Compute the JavaScript frame for the frame pointer of this IC // structure. We need this to be able to find the function @@ -197,7 +176,6 @@ Code* IC::GetOriginalCode() const { ASSERT(original_code->IsCode()); return original_code; } -#endif static bool HasInterceptorGetter(JSObject* object) { @@ -216,7 +194,7 @@ static void LookupForRead(Handle<Object> object, // Skip all the objects with named interceptors, but // without actual getter. while (true) { - object->Lookup(*name, lookup); + object->Lookup(name, lookup); // Besides normal conditions (property not found or it's not // an interceptor), bail out if lookup is not cacheable: we won't // be able to IC it anyway and regular lookup should work fine. @@ -229,7 +207,7 @@ static void LookupForRead(Handle<Object> object, return; } - holder->LocalLookupRealNamedProperty(*name, lookup); + holder->LocalLookupRealNamedProperty(name, lookup); if (lookup->IsFound()) { ASSERT(!lookup->IsInterceptor()); return; @@ -248,12 +226,7 @@ static void LookupForRead(Handle<Object> object, bool IC::TryRemoveInvalidPrototypeDependentStub(Handle<Object> receiver, Handle<String> name) { - if (target()->is_keyed_stub()) { - // Determine whether the failure is due to a name failure. 
- if (!name->IsName()) return false; - Name* stub_name = target()->FindFirstName(); - if (*name != stub_name) return false; - } + if (!IsNameCompatibleWithMonomorphicPrototypeFailure(name)) return false; InlineCacheHolderFlag cache_holder = Code::ExtractCacheHolderFromFlags(target()->flags()); @@ -298,7 +271,7 @@ bool IC::TryRemoveInvalidPrototypeDependentStub(Handle<Object> receiver, // If the IC is shared between multiple receivers (slow dictionary mode), then // the map cannot be deprecated and the stub invalidated. if (cache_holder == OWN_MAP) { - Map* old_map = target()->FindFirstMap(); + Map* old_map = FirstTargetMap(); if (old_map == *map) return true; if (old_map != NULL) { if (old_map->is_deprecated()) return true; @@ -312,7 +285,7 @@ bool IC::TryRemoveInvalidPrototypeDependentStub(Handle<Object> receiver, if (receiver->IsGlobalObject()) { LookupResult lookup(isolate()); GlobalObject* global = GlobalObject::cast(*receiver); - global->LocalLookupRealNamedProperty(*name, &lookup); + global->LocalLookupRealNamedProperty(name, &lookup); if (!lookup.IsFound()) return false; PropertyCell* cell = global->GetPropertyCell(&lookup); return cell->type()->IsConstant(); @@ -336,6 +309,18 @@ void IC::TryRemoveInvalidHandlers(Handle<Map> map, Handle<String> name) { } +bool IC::IsNameCompatibleWithMonomorphicPrototypeFailure(Handle<Object> name) { + if (target()->is_keyed_stub()) { + // Determine whether the failure is due to a name failure. + if (!name->IsName()) return false; + Name* stub_name = target()->FindFirstName(); + if (*name != stub_name) return false; + } + + return true; +} + + void IC::UpdateState(Handle<Object> receiver, Handle<Object> name) { if (!name->IsString()) return; if (state() != MONOMORPHIC) { @@ -352,8 +337,9 @@ void IC::UpdateState(Handle<Object> receiver, Handle<Object> name) { // because of changes in the prototype chain to avoid hitting it // again. if (TryRemoveInvalidPrototypeDependentStub( - receiver, Handle<String>::cast(name))) { - return MarkMonomorphicPrototypeFailure(); + receiver, Handle<String>::cast(name)) && + TryMarkMonomorphicPrototypeFailure(name)) { + return; } // The builtins object is special. It only changes when JavaScript @@ -366,22 +352,22 @@ void IC::UpdateState(Handle<Object> receiver, Handle<Object> name) { } -Failure* IC::TypeError(const char* type, - Handle<Object> object, - Handle<Object> key) { +MaybeHandle<Object> IC::TypeError(const char* type, + Handle<Object> object, + Handle<Object> key) { HandleScope scope(isolate()); Handle<Object> args[2] = { key, object }; Handle<Object> error = isolate()->factory()->NewTypeError( type, HandleVector(args, 2)); - return isolate()->Throw(*error); + return isolate()->Throw<Object>(error); } -Failure* IC::ReferenceError(const char* type, Handle<String> name) { +MaybeHandle<Object> IC::ReferenceError(const char* type, Handle<String> name) { HandleScope scope(isolate()); Handle<Object> error = isolate()->factory()->NewReferenceError( type, HandleVector(&name, 1)); - return isolate()->Throw(*error); + return isolate()->Throw<Object>(error); } @@ -406,6 +392,10 @@ void IC::PostPatching(Address address, Code* target, Code* old_target) { target->is_inline_cache_stub()) { int delta = ComputeTypeInfoCountDelta(old_target->ic_state(), target->ic_state()); + // Call ICs don't have interesting state changes from this point + // of view. + ASSERT(target->kind() != Code::CALL_IC || delta == 0); + // Not all Code objects have TypeFeedbackInfo. 
if (host->type_feedback_info()->IsTypeFeedbackInfo() && delta != 0) { TypeFeedbackInfo* info = @@ -426,6 +416,42 @@ void IC::PostPatching(Address address, Code* target, Code* old_target) { } +void IC::RegisterWeakMapDependency(Handle<Code> stub) { + if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_ic && + stub->CanBeWeakStub()) { + ASSERT(!stub->is_weak_stub()); + MapHandleList maps; + stub->FindAllMaps(&maps); + if (maps.length() == 1 && stub->IsWeakObjectInIC(*maps.at(0))) { + Map::AddDependentIC(maps.at(0), stub); + stub->mark_as_weak_stub(); + if (FLAG_enable_ool_constant_pool) { + stub->constant_pool()->set_weak_object_state( + ConstantPoolArray::WEAK_OBJECTS_IN_IC); + } + } + } +} + + +void IC::InvalidateMaps(Code* stub) { + ASSERT(stub->is_weak_stub()); + stub->mark_as_invalidated_weak_stub(); + Isolate* isolate = stub->GetIsolate(); + Heap* heap = isolate->heap(); + Object* undefined = heap->undefined_value(); + int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); + for (RelocIterator it(stub, mode_mask); !it.done(); it.next()) { + RelocInfo::Mode mode = it.rinfo()->rmode(); + if (mode == RelocInfo::EMBEDDED_OBJECT && + it.rinfo()->target_object()->IsMap()) { + it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER); + } + } + CPU::FlushICache(stub->instruction_start(), stub->instruction_size()); +} + + void IC::Clear(Isolate* isolate, Address address, ConstantPoolArray* constant_pool) { Code* target = GetTargetAtAddress(address, constant_pool); @@ -442,6 +468,8 @@ void IC::Clear(Isolate* isolate, Address address, return StoreIC::Clear(isolate, address, target, constant_pool); case Code::KEYED_STORE_IC: return KeyedStoreIC::Clear(isolate, address, target, constant_pool); + case Code::CALL_IC: + return CallIC::Clear(isolate, address, target, constant_pool); case Code::COMPARE_IC: return CompareIC::Clear(isolate, address, target, constant_pool); case Code::COMPARE_NIL_IC: @@ -468,6 +496,15 @@ void KeyedLoadIC::Clear(Isolate* isolate, } +void CallIC::Clear(Isolate* isolate, + Address address, + Code* target, + ConstantPoolArray* constant_pool) { + // Currently, CallIC doesn't have state changes. + ASSERT(target->ic_state() == v8::internal::GENERIC); +} + + void LoadIC::Clear(Isolate* isolate, Address address, Code* target, @@ -527,8 +564,7 @@ static bool MigrateDeprecated(Handle<Object> object) { } -MaybeObject* LoadIC::Load(Handle<Object> object, - Handle<String> name) { +MaybeHandle<Object> LoadIC::Load(Handle<Object> object, Handle<String> name) { // If the object is undefined or null it's illegal to try to get any // of its properties; throw a TypeError in that case. if (object->IsUndefined() || object->IsNull()) { @@ -538,14 +574,14 @@ MaybeObject* LoadIC::Load(Handle<Object> object, if (FLAG_use_ic) { // Use specialized code for getting prototype of functions. 
if (object->IsJSFunction() && - name->Equals(isolate()->heap()->prototype_string()) && + String::Equals(isolate()->factory()->prototype_string(), name) && Handle<JSFunction>::cast(object)->should_have_prototype()) { Handle<Code> stub; if (state() == UNINITIALIZED) { stub = pre_monomorphic_stub(); } else if (state() == PREMONOMORPHIC) { - FunctionPrototypeStub function_prototype_stub(kind()); - stub = function_prototype_stub.GetCode(isolate()); + FunctionPrototypeStub function_prototype_stub(isolate(), kind()); + stub = function_prototype_stub.GetCode(); } else if (state() != MEGAMORPHIC) { ASSERT(state() != GENERIC); stub = megamorphic_stub(); @@ -554,7 +590,7 @@ MaybeObject* LoadIC::Load(Handle<Object> object, set_target(*stub); if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n"); } - return *Accessors::FunctionGetPrototype(Handle<JSFunction>::cast(object)); + return Accessors::FunctionGetPrototype(Handle<JSFunction>::cast(object)); } } @@ -564,10 +600,13 @@ MaybeObject* LoadIC::Load(Handle<Object> object, if (kind() == Code::KEYED_LOAD_IC && name->AsArrayIndex(&index)) { // Rewrite to the generic keyed load stub. if (FLAG_use_ic) set_target(*generic_stub()); - Handle<Object> result = - Runtime::GetElementOrCharAt(isolate(), object, index); - RETURN_IF_EMPTY_HANDLE(isolate(), result); - return *result; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + Runtime::GetElementOrCharAt(isolate(), object, index), + Object); + return result; } bool use_ic = MigrateDeprecated(object) ? false : FLAG_use_ic; @@ -589,17 +628,19 @@ MaybeObject* LoadIC::Load(Handle<Object> object, PropertyAttributes attr; // Get the property. - Handle<Object> result = - Object::GetProperty(object, object, &lookup, name, &attr); - RETURN_IF_EMPTY_HANDLE(isolate(), result); - // If the property is not present, check if we need to throw an - // exception. + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + Object::GetProperty(object, object, &lookup, name, &attr), + Object); + // If the property is not present, check if we need to throw an exception. if ((lookup.IsInterceptor() || lookup.IsHandler()) && attr == ABSENT && IsUndeclaredGlobal(object)) { return ReferenceError("not_defined", name); } - return *result; + return result; } @@ -624,17 +665,18 @@ bool IC::UpdatePolymorphicIC(Handle<HeapType> type, TypeHandleList types; CodeHandleList handlers; - target()->FindAllTypes(&types); + TargetTypes(&types); int number_of_types = types.length(); int deprecated_types = 0; int handler_to_overwrite = -1; for (int i = 0; i < number_of_types; i++) { Handle<HeapType> current_type = types.at(i); - if (current_type->IsClass() && current_type->AsClass()->is_deprecated()) { + if (current_type->IsClass() && + current_type->AsClass()->Map()->is_deprecated()) { // Filter out deprecated maps to ensure their instances get migrated. ++deprecated_types; - } else if (type->IsCurrently(current_type)) { + } else if (type->NowIs(current_type)) { // If the receiver type is already in the polymorphic IC, this indicates // there was a prototoype chain failure. In that case, just overwrite the // handler. 
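The hunks above move LoadIC::Load, TypeError and ReferenceError from MaybeObject*/Failure* returns to MaybeHandle<Object>, with ASSIGN_RETURN_ON_EXCEPTION propagating a pending exception upward as an empty handle instead of a Failure sentinel. As a rough, standalone illustration of that idiom only (not V8 code; MaybeResult, GetProperty and ASSIGN_OR_RETURN below are invented names for the sketch):

#include <iostream>
#include <string>

// Stripped-down analogue of MaybeHandle<T>: an empty value means
// "exception pending" and must be checked before use.
template <typename T>
class MaybeResult {
 public:
  MaybeResult() : has_value_(false), value_() {}
  explicit MaybeResult(const T& value) : has_value_(true), value_(value) {}
  bool is_null() const { return !has_value_; }
  bool To(T* out) const {
    if (!has_value_) return false;
    *out = value_;
    return true;
  }
 private:
  bool has_value_;
  T value_;
};

// Analogue of ASSIGN_RETURN_ON_EXCEPTION: assign on success, otherwise
// propagate the empty ("exception") state to the caller.
#define ASSIGN_OR_RETURN(dst, call)                                   \
  do {                                                                \
    MaybeResult<std::string> maybe_result = (call);                   \
    if (!maybe_result.To(&(dst))) return MaybeResult<std::string>();  \
  } while (false)

MaybeResult<std::string> GetProperty(bool should_throw) {
  if (should_throw) return MaybeResult<std::string>();  // simulated throw
  return MaybeResult<std::string>(std::string("value"));
}

MaybeResult<std::string> Load(bool should_throw) {
  std::string result;
  ASSIGN_OR_RETURN(result, GetProperty(should_throw));  // early-out on failure
  return MaybeResult<std::string>(result + " (loaded)");
}

int main() {
  std::string out;
  std::cout << (Load(false).To(&out) ? out : std::string("exception")) << "\n";
  std::cout << (Load(true).To(&out) ? out : std::string("exception")) << "\n";
  return 0;
}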
@@ -642,8 +684,8 @@ bool IC::UpdatePolymorphicIC(Handle<HeapType> type, } else if (handler_to_overwrite == -1 && current_type->IsClass() && type->IsClass() && - IsTransitionOfMonomorphicTarget(*current_type->AsClass(), - *type->AsClass())) { + IsTransitionOfMonomorphicTarget(*current_type->AsClass()->Map(), + *type->AsClass()->Map())) { handler_to_overwrite = i; } } @@ -658,7 +700,7 @@ bool IC::UpdatePolymorphicIC(Handle<HeapType> type, number_of_valid_types++; if (handler_to_overwrite >= 0) { handlers.Set(handler_to_overwrite, code); - if (!type->IsCurrently(types.at(handler_to_overwrite))) { + if (!type->NowIs(types.at(handler_to_overwrite))) { types.Set(handler_to_overwrite, type); } } else { @@ -676,19 +718,20 @@ bool IC::UpdatePolymorphicIC(Handle<HeapType> type, Handle<HeapType> IC::CurrentTypeOf(Handle<Object> object, Isolate* isolate) { return object->IsJSGlobalObject() ? HeapType::Constant(Handle<JSGlobalObject>::cast(object), isolate) - : HeapType::OfCurrently(object, isolate); + : HeapType::NowOf(object, isolate); } Handle<Map> IC::TypeToMap(HeapType* type, Isolate* isolate) { if (type->Is(HeapType::Number())) return isolate->factory()->heap_number_map(); - if (type->Is(HeapType::Boolean())) return isolate->factory()->oddball_map(); + if (type->Is(HeapType::Boolean())) return isolate->factory()->boolean_map(); if (type->IsConstant()) { - return handle(Handle<JSGlobalObject>::cast(type->AsConstant())->map()); + return handle( + Handle<JSGlobalObject>::cast(type->AsConstant()->Value())->map()); } ASSERT(type->IsClass()); - return type->AsClass(); + return type->AsClass()->Map(); } @@ -727,7 +770,7 @@ void IC::UpdateMonomorphicIC(Handle<HeapType> type, void IC::CopyICToMegamorphicCache(Handle<String> name) { TypeHandleList types; CodeHandleList handlers; - target()->FindAllTypes(&types); + TargetTypes(&types); if (!target()->FindHandlers(&handlers, types.length())) return; for (int i = 0; i < types.length(); i++) { UpdateMegamorphicCache(*types.at(i), *name, *handlers.at(i)); @@ -800,11 +843,11 @@ Handle<Code> LoadIC::SimpleFieldLoad(int offset, bool inobject, Representation representation) { if (kind() == Code::LOAD_IC) { - LoadFieldStub stub(inobject, offset, representation); - return stub.GetCode(isolate()); + LoadFieldStub stub(isolate(), inobject, offset, representation); + return stub.GetCode(); } else { - KeyedLoadFieldStub stub(inobject, offset, representation); - return stub.GetCode(isolate()); + KeyedLoadFieldStub stub(isolate(), inobject, offset, representation); + return stub.GetCode(); } } @@ -880,19 +923,20 @@ Handle<Code> LoadIC::CompileHandler(LookupResult* lookup, Handle<String> name, Handle<Object> unused, InlineCacheHolderFlag cache_holder) { - if (object->IsString() && name->Equals(isolate()->heap()->length_string())) { + if (object->IsString() && + String::Equals(isolate()->factory()->length_string(), name)) { int length_index = String::kLengthOffset / kPointerSize; return SimpleFieldLoad(length_index); } if (object->IsStringWrapper() && - name->Equals(isolate()->heap()->length_string())) { + String::Equals(isolate()->factory()->length_string(), name)) { if (kind() == Code::LOAD_IC) { - StringLengthStub string_length_stub; - return string_length_stub.GetCode(isolate()); + StringLengthStub string_length_stub(isolate()); + return string_length_stub.GetCode(); } else { - KeyedStringLengthStub string_length_stub; - return string_length_stub.GetCode(isolate()); + KeyedStringLengthStub string_length_stub(isolate()); + return string_length_stub.GetCode(); } } @@ 
-981,9 +1025,7 @@ Handle<Code> LoadIC::CompileHandler(LookupResult* lookup, return compiler.CompileLoadViaGetter(type, holder, name, function); } // TODO(dcarney): Handle correctly. - if (callback->IsDeclaredAccessorInfo()) break; - ASSERT(callback->IsForeign()); - // No IC support for old-style native accessors. + ASSERT(callback->IsDeclaredAccessorInfo()); break; } case INTERCEPTOR: @@ -1028,19 +1070,13 @@ Handle<Code> KeyedLoadIC::LoadElementStub(Handle<JSObject> receiver) { Handle<Map> receiver_map(receiver->map(), isolate()); MapHandleList target_receiver_maps; - if (state() == UNINITIALIZED || state() == PREMONOMORPHIC) { - // Optimistically assume that ICs that haven't reached the MONOMORPHIC state - // yet will do so and stay there. - return isolate()->stub_cache()->ComputeKeyedLoadElement(receiver_map); - } - if (target().is_identical_to(string_stub())) { target_receiver_maps.Add(isolate()->factory()->string_map()); } else { - target()->FindAllMaps(&target_receiver_maps); - if (target_receiver_maps.length() == 0) { - return isolate()->stub_cache()->ComputeKeyedLoadElement(receiver_map); - } + TargetMaps(&target_receiver_maps); + } + if (target_receiver_maps.length() == 0) { + return isolate()->stub_cache()->ComputeKeyedLoadElement(receiver_map); } // The first time a receiver is seen that is a transitioned version of the @@ -1080,12 +1116,19 @@ Handle<Code> KeyedLoadIC::LoadElementStub(Handle<JSObject> receiver) { } -MaybeObject* KeyedLoadIC::Load(Handle<Object> object, Handle<Object> key) { +MaybeHandle<Object> KeyedLoadIC::Load(Handle<Object> object, + Handle<Object> key) { if (MigrateDeprecated(object)) { - return Runtime::GetObjectPropertyOrFail(isolate(), object, key); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + Runtime::GetObjectProperty(isolate(), object, key), + Object); + return result; } - MaybeObject* maybe_object = NULL; + Handle<Object> load_handle; Handle<Code> stub = generic_stub(); // Check for non-string values that can be converted into an @@ -1093,8 +1136,11 @@ MaybeObject* KeyedLoadIC::Load(Handle<Object> object, Handle<Object> key) { key = TryConvertKey(key, isolate()); if (key->IsInternalizedString()) { - maybe_object = LoadIC::Load(object, Handle<String>::cast(key)); - if (maybe_object->IsFailure()) return maybe_object; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + load_handle, + LoadIC::Load(object, Handle<String>::cast(key)), + Object); } else if (FLAG_use_ic && !object->IsAccessCheckNeeded()) { if (object->IsString() && key->IsNumber()) { if (state() == UNINITIALIZED) stub = string_stub(); @@ -1105,7 +1151,7 @@ MaybeObject* KeyedLoadIC::Load(Handle<Object> object, Handle<Object> key) { stub = sloppy_arguments_stub(); } else if (receiver->HasIndexedInterceptor()) { stub = indexed_interceptor_stub(); - } else if (!key->ToSmi()->IsFailure() && + } else if (!Object::ToSmi(isolate(), key).is_null() && (!target().is_identical_to(sloppy_arguments_stub()))) { stub = LoadElementStub(receiver); } @@ -1120,8 +1166,14 @@ MaybeObject* KeyedLoadIC::Load(Handle<Object> object, Handle<Object> key) { TRACE_IC("LoadIC", key); } - if (maybe_object != NULL) return maybe_object; - return Runtime::GetObjectPropertyOrFail(isolate(), object, key); + if (!load_handle.is_null()) return load_handle; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + Runtime::GetObjectProperty(isolate(), object, key), + Object); + return result; } @@ -1131,10 +1183,10 @@ static bool LookupForWrite(Handle<JSObject> receiver, 
LookupResult* lookup, IC* ic) { Handle<JSObject> holder = receiver; - receiver->Lookup(*name, lookup); + receiver->Lookup(name, lookup); if (lookup->IsFound()) { if (lookup->IsInterceptor() && !HasInterceptorSetter(lookup->holder())) { - receiver->LocalLookupRealNamedProperty(*name, lookup); + receiver->LocalLookupRealNamedProperty(name, lookup); if (!lookup->IsFound()) return false; } @@ -1160,9 +1212,7 @@ static bool LookupForWrite(Handle<JSObject> receiver, // chain check. This avoids a double lookup, but requires us to pass in the // receiver when trying to fetch extra information from the transition. receiver->map()->LookupTransition(*holder, *name, lookup); - if (!lookup->IsTransition()) return false; - PropertyDetails target_details = lookup->GetTransitionDetails(); - if (target_details.IsReadOnly()) return false; + if (!lookup->IsTransition() || lookup->IsReadOnly()) return false; // If the value that's being stored does not fit in the field that the // instance would transition to, create a new transition that fits the value. @@ -1171,30 +1221,38 @@ static bool LookupForWrite(Handle<JSObject> receiver, // Ensure the instance and its map were migrated before trying to update the // transition target. ASSERT(!receiver->map()->is_deprecated()); - if (!value->FitsRepresentation(target_details.representation())) { + if (!lookup->CanHoldValue(value)) { Handle<Map> target(lookup->GetTransitionTarget()); + Representation field_representation = value->OptimalRepresentation(); + Handle<HeapType> field_type = value->OptimalType( + lookup->isolate(), field_representation); Map::GeneralizeRepresentation( target, target->LastAdded(), - value->OptimalRepresentation(), FORCE_FIELD); + field_representation, field_type, FORCE_FIELD); // Lookup the transition again since the transition tree may have changed // entirely by the migration above. receiver->map()->LookupTransition(*holder, *name, lookup); if (!lookup->IsTransition()) return false; - ic->MarkMonomorphicPrototypeFailure(); + return ic->TryMarkMonomorphicPrototypeFailure(name); } + return true; } -MaybeObject* StoreIC::Store(Handle<Object> object, - Handle<String> name, - Handle<Object> value, - JSReceiver::StoreFromKeyed store_mode) { +MaybeHandle<Object> StoreIC::Store(Handle<Object> object, + Handle<String> name, + Handle<Object> value, + JSReceiver::StoreFromKeyed store_mode) { if (MigrateDeprecated(object) || object->IsJSProxy()) { - Handle<Object> result = JSReceiver::SetProperty( - Handle<JSReceiver>::cast(object), name, value, NONE, strict_mode()); - RETURN_IF_EMPTY_HANDLE(isolate(), result); - return *result; + Handle<JSReceiver> receiver = Handle<JSReceiver>::cast(object); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + JSReceiver::SetProperty(receiver, name, value, NONE, strict_mode()), + Object); + return result; } // If the object is undefined or null it's illegal to try to set any @@ -1205,31 +1263,38 @@ MaybeObject* StoreIC::Store(Handle<Object> object, // The length property of string values is read-only. Throw in strict mode. if (strict_mode() == STRICT && object->IsString() && - name->Equals(isolate()->heap()->length_string())) { + String::Equals(isolate()->factory()->length_string(), name)) { return TypeError("strict_read_only_property", object, name); } // Ignore other stores where the receiver is not a JSObject. // TODO(1475): Must check prototype chains of object wrappers. 
- if (!object->IsJSObject()) return *value; + if (!object->IsJSObject()) return value; Handle<JSObject> receiver = Handle<JSObject>::cast(object); // Check if the given name is an array index. uint32_t index; if (name->AsArrayIndex(&index)) { - Handle<Object> result = - JSObject::SetElement(receiver, index, value, NONE, strict_mode()); - RETURN_IF_EMPTY_HANDLE(isolate(), result); - return *value; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + JSObject::SetElement(receiver, index, value, NONE, strict_mode()), + Object); + return value; } // Observed objects are always modified through the runtime. if (receiver->map()->is_observed()) { - Handle<Object> result = JSReceiver::SetProperty( - receiver, name, value, NONE, strict_mode(), store_mode); - RETURN_IF_EMPTY_HANDLE(isolate(), result); - return *result; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + JSReceiver::SetProperty( + receiver, name, value, NONE, strict_mode(), store_mode), + Object); + return result; } LookupResult lookup(isolate()); @@ -1257,10 +1322,31 @@ MaybeObject* StoreIC::Store(Handle<Object> object, } // Set the property. - Handle<Object> result = JSReceiver::SetProperty( - receiver, name, value, NONE, strict_mode(), store_mode); - RETURN_IF_EMPTY_HANDLE(isolate(), result); - return *result; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + JSReceiver::SetProperty( + receiver, name, value, NONE, strict_mode(), store_mode), + Object); + return result; +} + + +void CallIC::State::Print(StringStream* stream) const { + stream->Add("(args(%d), ", + argc_); + stream->Add("%s, ", + call_type_ == CallIC::METHOD ? "METHOD" : "FUNCTION"); +} + + +Handle<Code> CallIC::initialize_stub(Isolate* isolate, + int argc, + CallType call_type) { + CallICStub stub(isolate, State::DefaultCallState(argc, call_type)); + Handle<Code> code = stub.GetCode(); + return code; } @@ -1319,93 +1405,80 @@ Handle<Code> StoreIC::CompileHandler(LookupResult* lookup, Handle<JSObject> holder(lookup->holder()); // Handlers do not use strict mode. StoreStubCompiler compiler(isolate(), SLOPPY, kind()); - switch (lookup->type()) { - case FIELD: - return compiler.CompileStoreField(receiver, lookup, name); - case TRANSITION: { - // Explicitly pass in the receiver map since LookupForWrite may have - // stored something else than the receiver in the holder. - Handle<Map> transition(lookup->GetTransitionTarget()); - PropertyDetails details = transition->GetLastDescriptorDetails(); - - if (details.type() == CALLBACKS || details.attributes() != NONE) break; + if (lookup->IsTransition()) { + // Explicitly pass in the receiver map since LookupForWrite may have + // stored something else than the receiver in the holder. + Handle<Map> transition(lookup->GetTransitionTarget()); + PropertyDetails details = lookup->GetPropertyDetails(); + if (details.type() != CALLBACKS && details.attributes() == NONE) { return compiler.CompileStoreTransition( receiver, lookup, transition, name); } - case NORMAL: - if (kind() == Code::KEYED_STORE_IC) break; - if (receiver->IsJSGlobalProxy() || receiver->IsGlobalObject()) { - // The stub generated for the global object picks the value directly - // from the property cell. So the property must be directly on the - // global object. - Handle<GlobalObject> global = receiver->IsJSGlobalProxy() - ? 
handle(GlobalObject::cast(receiver->GetPrototype())) - : Handle<GlobalObject>::cast(receiver); - Handle<PropertyCell> cell(global->GetPropertyCell(lookup), isolate()); - Handle<HeapType> union_type = PropertyCell::UpdatedType(cell, value); - StoreGlobalStub stub( - union_type->IsConstant(), receiver->IsJSGlobalProxy()); - Handle<Code> code = stub.GetCodeCopyFromTemplate( - isolate(), global, cell); - // TODO(verwaest): Move caching of these NORMAL stubs outside as well. - HeapObject::UpdateMapCodeCache(receiver, name, code); - return code; - } - ASSERT(holder.is_identical_to(receiver)); - return isolate()->builtins()->StoreIC_Normal(); - case CALLBACKS: { - Handle<Object> callback(lookup->GetCallbackObject(), isolate()); - if (callback->IsExecutableAccessorInfo()) { - Handle<ExecutableAccessorInfo> info = - Handle<ExecutableAccessorInfo>::cast(callback); - if (v8::ToCData<Address>(info->setter()) == 0) break; - if (!holder->HasFastProperties()) break; - if (!info->IsCompatibleReceiver(*receiver)) break; - return compiler.CompileStoreCallback(receiver, holder, name, info); - } else if (callback->IsAccessorPair()) { - Handle<Object> setter( - Handle<AccessorPair>::cast(callback)->setter(), isolate()); - if (!setter->IsJSFunction()) break; - if (holder->IsGlobalObject()) break; - if (!holder->HasFastProperties()) break; - Handle<JSFunction> function = Handle<JSFunction>::cast(setter); - CallOptimization call_optimization(function); - if (call_optimization.is_simple_api_call() && - call_optimization.IsCompatibleReceiver(receiver, holder)) { - return compiler.CompileStoreCallback( - receiver, holder, name, call_optimization); + } else { + switch (lookup->type()) { + case FIELD: + return compiler.CompileStoreField(receiver, lookup, name); + case NORMAL: + if (kind() == Code::KEYED_STORE_IC) break; + if (receiver->IsJSGlobalProxy() || receiver->IsGlobalObject()) { + // The stub generated for the global object picks the value directly + // from the property cell. So the property must be directly on the + // global object. + Handle<GlobalObject> global = receiver->IsJSGlobalProxy() + ? handle(GlobalObject::cast(receiver->GetPrototype())) + : Handle<GlobalObject>::cast(receiver); + Handle<PropertyCell> cell(global->GetPropertyCell(lookup), isolate()); + Handle<HeapType> union_type = PropertyCell::UpdatedType(cell, value); + StoreGlobalStub stub( + isolate(), union_type->IsConstant(), receiver->IsJSGlobalProxy()); + Handle<Code> code = stub.GetCodeCopyFromTemplate(global, cell); + // TODO(verwaest): Move caching of these NORMAL stubs outside as well. + HeapObject::UpdateMapCodeCache(receiver, name, code); + return code; } - return compiler.CompileStoreViaSetter( - receiver, holder, name, Handle<JSFunction>::cast(setter)); - } - // TODO(dcarney): Handle correctly. - if (callback->IsDeclaredAccessorInfo()) break; - ASSERT(callback->IsForeign()); - - // Use specialized code for setting the length of arrays with fast - // properties. Slow properties might indicate redefinition of the length - // property. 
- if (receiver->IsJSArray() && - name->Equals(isolate()->heap()->length_string()) && - Handle<JSArray>::cast(receiver)->AllowsSetElementsLength() && - receiver->HasFastProperties()) { - return compiler.CompileStoreArrayLength(receiver, lookup, name); + ASSERT(holder.is_identical_to(receiver)); + return isolate()->builtins()->StoreIC_Normal(); + case CALLBACKS: { + Handle<Object> callback(lookup->GetCallbackObject(), isolate()); + if (callback->IsExecutableAccessorInfo()) { + Handle<ExecutableAccessorInfo> info = + Handle<ExecutableAccessorInfo>::cast(callback); + if (v8::ToCData<Address>(info->setter()) == 0) break; + if (!holder->HasFastProperties()) break; + if (!info->IsCompatibleReceiver(*receiver)) break; + return compiler.CompileStoreCallback(receiver, holder, name, info); + } else if (callback->IsAccessorPair()) { + Handle<Object> setter( + Handle<AccessorPair>::cast(callback)->setter(), isolate()); + if (!setter->IsJSFunction()) break; + if (holder->IsGlobalObject()) break; + if (!holder->HasFastProperties()) break; + Handle<JSFunction> function = Handle<JSFunction>::cast(setter); + CallOptimization call_optimization(function); + if (call_optimization.is_simple_api_call() && + call_optimization.IsCompatibleReceiver(receiver, holder)) { + return compiler.CompileStoreCallback( + receiver, holder, name, call_optimization); + } + return compiler.CompileStoreViaSetter( + receiver, holder, name, Handle<JSFunction>::cast(setter)); + } + // TODO(dcarney): Handle correctly. + ASSERT(callback->IsDeclaredAccessorInfo()); + break; } - - // No IC support for old-style native accessors. - break; + case INTERCEPTOR: + if (kind() == Code::KEYED_STORE_IC) break; + ASSERT(HasInterceptorSetter(*holder)); + return compiler.CompileStoreInterceptor(receiver, name); + case CONSTANT: + break; + case NONEXISTENT: + case HANDLER: + UNREACHABLE(); + break; } - case INTERCEPTOR: - if (kind() == Code::KEYED_STORE_IC) break; - ASSERT(HasInterceptorSetter(*holder)); - return compiler.CompileStoreInterceptor(receiver, name); - case CONSTANT: - break; - case NONEXISTENT: - case HANDLER: - UNREACHABLE(); - break; } return slow_stub(); } @@ -1422,24 +1495,16 @@ Handle<Code> KeyedStoreIC::StoreElementStub(Handle<JSObject> receiver, } Handle<Map> receiver_map(receiver->map(), isolate()); - if (state() == UNINITIALIZED || state() == PREMONOMORPHIC) { - // Optimistically assume that ICs that haven't reached the MONOMORPHIC state - // yet will do so and stay there. - Handle<Map> monomorphic_map = ComputeTransitionedMap(receiver, store_mode); + MapHandleList target_receiver_maps; + TargetMaps(&target_receiver_maps); + if (target_receiver_maps.length() == 0) { + Handle<Map> monomorphic_map = + ComputeTransitionedMap(receiver_map, store_mode); store_mode = GetNonTransitioningStoreMode(store_mode); return isolate()->stub_cache()->ComputeKeyedStoreElement( monomorphic_map, strict_mode(), store_mode); } - MapHandleList target_receiver_maps; - target()->FindAllMaps(&target_receiver_maps); - if (target_receiver_maps.length() == 0) { - // In the case that there is a non-map-specific IC is installed (e.g. keyed - // stores into properties in dictionary mode), then there will be not - // receiver maps in the target. - return generic_stub(); - } - // There are several special cases where an IC that is MONOMORPHIC can still // transition to a different GetNonTransitioningStoreMode IC that handles a // superset of the original IC. 
Handle those here if the receiver map hasn't @@ -1450,7 +1515,8 @@ Handle<Code> KeyedStoreIC::StoreElementStub(Handle<JSObject> receiver, if (state() == MONOMORPHIC) { Handle<Map> transitioned_receiver_map = receiver_map; if (IsTransitionStoreMode(store_mode)) { - transitioned_receiver_map = ComputeTransitionedMap(receiver, store_mode); + transitioned_receiver_map = + ComputeTransitionedMap(receiver_map, store_mode); } if ((receiver_map.is_identical_to(previous_receiver_map) && IsTransitionStoreMode(store_mode)) || @@ -1482,7 +1548,7 @@ Handle<Code> KeyedStoreIC::StoreElementStub(Handle<JSObject> receiver, if (IsTransitionStoreMode(store_mode)) { Handle<Map> transitioned_receiver_map = - ComputeTransitionedMap(receiver, store_mode); + ComputeTransitionedMap(receiver_map, store_mode); map_added |= AddOneReceiverMapIfMissing(&target_receiver_maps, transitioned_receiver_map); } @@ -1538,36 +1604,35 @@ Handle<Code> KeyedStoreIC::StoreElementStub(Handle<JSObject> receiver, Handle<Map> KeyedStoreIC::ComputeTransitionedMap( - Handle<JSObject> receiver, + Handle<Map> map, KeyedAccessStoreMode store_mode) { switch (store_mode) { case STORE_TRANSITION_SMI_TO_OBJECT: case STORE_TRANSITION_DOUBLE_TO_OBJECT: case STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT: case STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT: - return JSObject::GetElementsTransitionMap(receiver, FAST_ELEMENTS); + return Map::TransitionElementsTo(map, FAST_ELEMENTS); case STORE_TRANSITION_SMI_TO_DOUBLE: case STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE: - return JSObject::GetElementsTransitionMap(receiver, FAST_DOUBLE_ELEMENTS); + return Map::TransitionElementsTo(map, FAST_DOUBLE_ELEMENTS); case STORE_TRANSITION_HOLEY_SMI_TO_OBJECT: case STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT: case STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_OBJECT: case STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT: - return JSObject::GetElementsTransitionMap(receiver, - FAST_HOLEY_ELEMENTS); + return Map::TransitionElementsTo(map, FAST_HOLEY_ELEMENTS); case STORE_TRANSITION_HOLEY_SMI_TO_DOUBLE: case STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_DOUBLE: - return JSObject::GetElementsTransitionMap(receiver, - FAST_HOLEY_DOUBLE_ELEMENTS); + return Map::TransitionElementsTo(map, FAST_HOLEY_DOUBLE_ELEMENTS); case STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS: - ASSERT(receiver->map()->has_external_array_elements()); + ASSERT(map->has_external_array_elements()); // Fall through case STORE_NO_TRANSITION_HANDLE_COW: case STANDARD_STORE: case STORE_AND_GROW_NO_TRANSITION: - return Handle<Map>(receiver->map(), isolate()); + return map; } - return Handle<Map>::null(); + UNREACHABLE(); + return MaybeHandle<Map>().ToHandleChecked(); } @@ -1584,9 +1649,7 @@ bool IsOutOfBoundsAccess(Handle<JSObject> receiver, KeyedAccessStoreMode KeyedStoreIC::GetStoreMode(Handle<JSObject> receiver, Handle<Object> key, Handle<Object> value) { - ASSERT(!key->ToSmi()->IsFailure()); - Smi* smi_key = NULL; - key->ToSmi()->To(&smi_key); + Handle<Smi> smi_key = Object::ToSmi(isolate(), key).ToHandleChecked(); int index = smi_key->value(); bool oob_access = IsOutOfBoundsAccess(receiver, index); // Don't consider this a growing store if the store would send the receiver to @@ -1659,34 +1722,39 @@ KeyedAccessStoreMode KeyedStoreIC::GetStoreMode(Handle<JSObject> receiver, } -MaybeObject* KeyedStoreIC::Store(Handle<Object> object, - Handle<Object> key, - Handle<Object> value) { +MaybeHandle<Object> KeyedStoreIC::Store(Handle<Object> object, + Handle<Object> key, + Handle<Object> value) { if (MigrateDeprecated(object)) { - 
Handle<Object> result = Runtime::SetObjectProperty(isolate(), object, - key, - value, - NONE, - strict_mode()); - RETURN_IF_EMPTY_HANDLE(isolate(), result); - return *result; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + Runtime::SetObjectProperty( + isolate(), object, key, value, NONE, strict_mode()), + Object); + return result; } // Check for non-string values that can be converted into an // internalized string directly or is representable as a smi. key = TryConvertKey(key, isolate()); - MaybeObject* maybe_object = NULL; + Handle<Object> store_handle; Handle<Code> stub = generic_stub(); if (key->IsInternalizedString()) { - maybe_object = StoreIC::Store(object, - Handle<String>::cast(key), - value, - JSReceiver::MAY_BE_STORE_FROM_KEYED); - if (maybe_object->IsFailure()) return maybe_object; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + store_handle, + StoreIC::Store(object, + Handle<String>::cast(key), + value, + JSReceiver::MAY_BE_STORE_FROM_KEYED), + Object); } else { bool use_ic = FLAG_use_ic && + !object->IsStringWrapper() && !object->IsAccessCheckNeeded() && !object->IsJSGlobalProxy() && !(object->IsJSObject() && @@ -1704,7 +1772,7 @@ MaybeObject* KeyedStoreIC::Store(Handle<Object> object, if (object->IsJSObject()) { Handle<JSObject> receiver = Handle<JSObject>::cast(object); - bool key_is_smi_like = key->IsSmi() || !key->ToSmi()->IsFailure(); + bool key_is_smi_like = !Object::ToSmi(isolate(), key).is_null(); if (receiver->elements()->map() == isolate()->heap()->sloppy_arguments_elements_map()) { if (strict_mode() == SLOPPY) { @@ -1735,13 +1803,56 @@ MaybeObject* KeyedStoreIC::Store(Handle<Object> object, TRACE_IC("StoreIC", key); } - if (maybe_object) return maybe_object; - Handle<Object> result = Runtime::SetObjectProperty(isolate(), object, key, - value, - NONE, - strict_mode()); - RETURN_IF_EMPTY_HANDLE(isolate(), result); - return *result; + if (!store_handle.is_null()) return store_handle; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + Runtime::SetObjectProperty( + isolate(), object, key, value, NONE, strict_mode()), + Object); + return result; +} + + +CallIC::State::State(ExtraICState extra_ic_state) + : argc_(ArgcBits::decode(extra_ic_state)), + call_type_(CallTypeBits::decode(extra_ic_state)) { +} + + +ExtraICState CallIC::State::GetExtraICState() const { + ExtraICState extra_ic_state = + ArgcBits::encode(argc_) | + CallTypeBits::encode(call_type_); + return extra_ic_state; +} + + +void CallIC::HandleMiss(Handle<Object> receiver, + Handle<Object> function, + Handle<FixedArray> vector, + Handle<Smi> slot) { + State state(target()->extra_ic_state()); + Object* feedback = vector->get(slot->value()); + + if (feedback->IsJSFunction() || !function->IsJSFunction()) { + // We are going generic. + ASSERT(!function->IsJSFunction() || *function != feedback); + + vector->set(slot->value(), + *TypeFeedbackInfo::MegamorphicSentinel(isolate()), + SKIP_WRITE_BARRIER); + TRACE_GENERIC_IC(isolate(), "CallIC", "megamorphic"); + } else { + // If we came here feedback must be the uninitialized sentinel, + // and we are going monomorphic. + ASSERT(feedback == *TypeFeedbackInfo::UninitializedSentinel(isolate())); + Handle<JSFunction> js_function = Handle<JSFunction>::cast(function); + Handle<Object> name(js_function->shared()->name(), isolate()); + TRACE_IC("CallIC", name); + vector->set(slot->value(), *function); + } } @@ -1753,65 +1864,94 @@ MaybeObject* KeyedStoreIC::Store(Handle<Object> object, // // Used from ic-<arch>.cc. 
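CallIC::HandleMiss above drives a type feedback vector slot from the uninitialized sentinel to either a single cached JSFunction (monomorphic) or the megamorphic sentinel. A plain-C++ sketch of that state machine, with placeholder string sentinels standing in for V8's dedicated heap objects:

#include <iostream>
#include <string>
#include <vector>

// Placeholder sentinels for this sketch only.
static const std::string kUninitialized = "<uninitialized>";
static const std::string kMegamorphic = "<megamorphic>";

// On a call miss: cache the first function target seen; any second target,
// or a non-function callee, drops the slot to the generic state.
void HandleCallMiss(std::vector<std::string>* vector, int slot,
                    const std::string& callee, bool callee_is_function) {
  std::string& feedback = (*vector)[slot];
  bool slot_holds_function =
      feedback != kUninitialized && feedback != kMegamorphic;
  if (slot_holds_function || !callee_is_function) {
    feedback = kMegamorphic;  // going generic
  } else {
    feedback = callee;        // going monomorphic on this function
  }
}

int main() {
  std::vector<std::string> vector(1, kUninitialized);
  HandleCallMiss(&vector, 0, "f", true);  // first target: monomorphic on "f"
  std::cout << vector[0] << "\n";
  HandleCallMiss(&vector, 0, "g", true);  // different target: megamorphic
  std::cout << vector[0] << "\n";
  return 0;
}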
+RUNTIME_FUNCTION(CallIC_Miss) { + HandleScope scope(isolate); + ASSERT(args.length() == 4); + CallIC ic(isolate); + Handle<Object> receiver = args.at<Object>(0); + Handle<Object> function = args.at<Object>(1); + Handle<FixedArray> vector = args.at<FixedArray>(2); + Handle<Smi> slot = args.at<Smi>(3); + ic.HandleMiss(receiver, function, vector, slot); + return *function; +} + + // Used from ic-<arch>.cc. -RUNTIME_FUNCTION(MaybeObject*, LoadIC_Miss) { +RUNTIME_FUNCTION(LoadIC_Miss) { HandleScope scope(isolate); ASSERT(args.length() == 2); LoadIC ic(IC::NO_EXTRA_FRAME, isolate); Handle<Object> receiver = args.at<Object>(0); Handle<String> key = args.at<String>(1); ic.UpdateState(receiver, key); - return ic.Load(receiver, key); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result, ic.Load(receiver, key)); + return *result; } // Used from ic-<arch>.cc -RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_Miss) { +RUNTIME_FUNCTION(KeyedLoadIC_Miss) { HandleScope scope(isolate); ASSERT(args.length() == 2); KeyedLoadIC ic(IC::NO_EXTRA_FRAME, isolate); Handle<Object> receiver = args.at<Object>(0); Handle<Object> key = args.at<Object>(1); ic.UpdateState(receiver, key); - return ic.Load(receiver, key); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result, ic.Load(receiver, key)); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissFromStubFailure) { +RUNTIME_FUNCTION(KeyedLoadIC_MissFromStubFailure) { HandleScope scope(isolate); ASSERT(args.length() == 2); KeyedLoadIC ic(IC::EXTRA_CALL_FRAME, isolate); Handle<Object> receiver = args.at<Object>(0); Handle<Object> key = args.at<Object>(1); ic.UpdateState(receiver, key); - return ic.Load(receiver, key); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result, ic.Load(receiver, key)); + return *result; } // Used from ic-<arch>.cc. -RUNTIME_FUNCTION(MaybeObject*, StoreIC_Miss) { +RUNTIME_FUNCTION(StoreIC_Miss) { HandleScope scope(isolate); ASSERT(args.length() == 3); StoreIC ic(IC::NO_EXTRA_FRAME, isolate); Handle<Object> receiver = args.at<Object>(0); Handle<String> key = args.at<String>(1); ic.UpdateState(receiver, key); - return ic.Store(receiver, key, args.at<Object>(2)); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, + result, + ic.Store(receiver, key, args.at<Object>(2))); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, StoreIC_MissFromStubFailure) { +RUNTIME_FUNCTION(StoreIC_MissFromStubFailure) { HandleScope scope(isolate); ASSERT(args.length() == 3); StoreIC ic(IC::EXTRA_CALL_FRAME, isolate); Handle<Object> receiver = args.at<Object>(0); Handle<String> key = args.at<String>(1); ic.UpdateState(receiver, key); - return ic.Store(receiver, key, args.at<Object>(2)); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, + result, + ic.Store(receiver, key, args.at<Object>(2))); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, StoreIC_ArrayLength) { +RUNTIME_FUNCTION(StoreIC_ArrayLength) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -1824,12 +1964,12 @@ RUNTIME_FUNCTION(MaybeObject*, StoreIC_ArrayLength) { #ifdef DEBUG // The length property has to be a writable callback property. 
LookupResult debug_lookup(isolate); - receiver->LocalLookup(isolate->heap()->length_string(), &debug_lookup); + receiver->LocalLookup(isolate->factory()->length_string(), &debug_lookup); ASSERT(debug_lookup.IsPropertyCallbacks() && !debug_lookup.IsReadOnly()); #endif - RETURN_IF_EMPTY_HANDLE(isolate, - JSArray::SetElementsLength(receiver, len)); + RETURN_FAILURE_ON_EXCEPTION( + isolate, JSArray::SetElementsLength(receiver, len)); return *len; } @@ -1837,74 +1977,79 @@ RUNTIME_FUNCTION(MaybeObject*, StoreIC_ArrayLength) { // Extend storage is called in a store inline cache when // it is necessary to extend the properties array of a // JSObject. -RUNTIME_FUNCTION(MaybeObject*, SharedStoreIC_ExtendStorage) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(SharedStoreIC_ExtendStorage) { + HandleScope shs(isolate); ASSERT(args.length() == 3); // Convert the parameters - JSObject* object = JSObject::cast(args[0]); - Map* transition = Map::cast(args[1]); - Object* value = args[2]; + Handle<JSObject> object = args.at<JSObject>(0); + Handle<Map> transition = args.at<Map>(1); + Handle<Object> value = args.at<Object>(2); // Check the object has run out out property space. ASSERT(object->HasFastProperties()); ASSERT(object->map()->unused_property_fields() == 0); // Expand the properties array. - FixedArray* old_storage = object->properties(); + Handle<FixedArray> old_storage = handle(object->properties(), isolate); int new_unused = transition->unused_property_fields(); int new_size = old_storage->length() + new_unused + 1; - Object* result; - MaybeObject* maybe_result = old_storage->CopySize(new_size); - if (!maybe_result->ToObject(&result)) return maybe_result; - FixedArray* new_storage = FixedArray::cast(result); + Handle<FixedArray> new_storage = FixedArray::CopySize(old_storage, new_size); - Object* to_store = value; + Handle<Object> to_store = value; - DescriptorArray* descriptors = transition->instance_descriptors(); - PropertyDetails details = descriptors->GetDetails(transition->LastAdded()); + PropertyDetails details = transition->instance_descriptors()->GetDetails( + transition->LastAdded()); if (details.representation().IsDouble()) { - MaybeObject* maybe_storage = - isolate->heap()->AllocateHeapNumber(value->Number()); - if (!maybe_storage->To(&to_store)) return maybe_storage; + to_store = isolate->factory()->NewHeapNumber(value->Number()); } - new_storage->set(old_storage->length(), to_store); + new_storage->set(old_storage->length(), *to_store); // Set the new property value and do the map transition. - object->set_properties(new_storage); - object->set_map(transition); + object->set_properties(*new_storage); + object->set_map(*transition); // Return the stored value. - return value; + return *value; } // Used from ic-<arch>.cc. 
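SharedStoreIC_ExtendStorage above now works through handles rather than raw pointers, but the underlying step is unchanged: copy the properties array into a larger backing store, write the incoming value at the old length, then install the new storage and the transition map. A rough standalone analogue of that growth step (no handles or GC here, and the map transition is reduced to a comment):

#include <iostream>
#include <vector>

struct FakeObject {
  std::vector<double> properties;  // stands in for the FixedArray backing store
};

// Grow the property backing store and write |value| into the first new slot.
void ExtendStorage(FakeObject* object, int new_unused, double value) {
  const std::vector<double>& old_storage = object->properties;
  std::vector<double> new_storage;
  new_storage.reserve(old_storage.size() + new_unused + 1);  // CopySize analogue
  new_storage.assign(old_storage.begin(), old_storage.end());
  new_storage.push_back(value);          // stored at the old length
  object->properties.swap(new_storage);  // set_properties(); map transition elided
}

int main() {
  FakeObject o;
  o.properties.push_back(1.0);
  ExtendStorage(&o, 3, 2.5);
  std::cout << o.properties.size() << " " << o.properties.back() << "\n";
  return 0;
}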
-RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Miss) { +RUNTIME_FUNCTION(KeyedStoreIC_Miss) { HandleScope scope(isolate); ASSERT(args.length() == 3); KeyedStoreIC ic(IC::NO_EXTRA_FRAME, isolate); Handle<Object> receiver = args.at<Object>(0); Handle<Object> key = args.at<Object>(1); ic.UpdateState(receiver, key); - return ic.Store(receiver, key, args.at<Object>(2)); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, + result, + ic.Store(receiver, key, args.at<Object>(2))); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure) { +RUNTIME_FUNCTION(KeyedStoreIC_MissFromStubFailure) { HandleScope scope(isolate); ASSERT(args.length() == 3); KeyedStoreIC ic(IC::EXTRA_CALL_FRAME, isolate); Handle<Object> receiver = args.at<Object>(0); Handle<Object> key = args.at<Object>(1); ic.UpdateState(receiver, key); - return ic.Store(receiver, key, args.at<Object>(2)); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, + result, + ic.Store(receiver, key, args.at<Object>(2))); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, StoreIC_Slow) { +RUNTIME_FUNCTION(StoreIC_Slow) { HandleScope scope(isolate); ASSERT(args.length() == 3); StoreIC ic(IC::NO_EXTRA_FRAME, isolate); @@ -1912,16 +2057,16 @@ RUNTIME_FUNCTION(MaybeObject*, StoreIC_Slow) { Handle<Object> key = args.at<Object>(1); Handle<Object> value = args.at<Object>(2); StrictMode strict_mode = ic.strict_mode(); - Handle<Object> result = Runtime::SetObjectProperty(isolate, object, key, - value, - NONE, - strict_mode); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Runtime::SetObjectProperty( + isolate, object, key, value, NONE, strict_mode)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Slow) { +RUNTIME_FUNCTION(KeyedStoreIC_Slow) { HandleScope scope(isolate); ASSERT(args.length() == 3); KeyedStoreIC ic(IC::NO_EXTRA_FRAME, isolate); @@ -1929,16 +2074,16 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Slow) { Handle<Object> key = args.at<Object>(1); Handle<Object> value = args.at<Object>(2); StrictMode strict_mode = ic.strict_mode(); - Handle<Object> result = Runtime::SetObjectProperty(isolate, object, key, - value, - NONE, - strict_mode); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Runtime::SetObjectProperty( + isolate, object, key, value, NONE, strict_mode)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, ElementsTransitionAndStoreIC_Miss) { +RUNTIME_FUNCTION(ElementsTransitionAndStoreIC_Miss) { HandleScope scope(isolate); ASSERT(args.length() == 4); KeyedStoreIC ic(IC::EXTRA_CALL_FRAME, isolate); @@ -1951,16 +2096,17 @@ RUNTIME_FUNCTION(MaybeObject*, ElementsTransitionAndStoreIC_Miss) { JSObject::TransitionElementsKind(Handle<JSObject>::cast(object), map->elements_kind()); } - Handle<Object> result = Runtime::SetObjectProperty(isolate, object, key, - value, - NONE, - strict_mode); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Runtime::SetObjectProperty( + isolate, object, key, value, NONE, strict_mode)); return *result; } -BinaryOpIC::State::State(ExtraICState extra_ic_state) { +BinaryOpIC::State::State(Isolate* isolate, ExtraICState extra_ic_state) + : isolate_(isolate) { // We don't deserialize the SSE2 Field, since this is only used to be able // to include SSE2 as well as non-SSE2 versions in the snapshot. 
For code // generation we always want it to reflect the current state. @@ -1984,7 +2130,7 @@ BinaryOpIC::State::State(ExtraICState extra_ic_state) { ExtraICState BinaryOpIC::State::GetExtraICState() const { bool sse2 = (Max(result_kind_, Max(left_kind_, right_kind_)) > SMI && - CpuFeatures::IsSafeForSnapshot(SSE2)); + CpuFeatures::IsSafeForSnapshot(isolate(), SSE2)); ExtraICState extra_ic_state = SSE2Field::encode(sse2) | OpField::encode(op_ - FIRST_TOKEN) | @@ -2011,7 +2157,7 @@ void BinaryOpIC::State::GenerateAheadOfTime( // Generated list of commonly used stubs #define GENERATE(op, left_kind, right_kind, result_kind, mode) \ do { \ - State state(op, mode); \ + State state(isolate, op, mode); \ state.left_kind_ = left_kind; \ state.fixed_right_arg_.has_value = false; \ state.right_kind_ = right_kind; \ @@ -2206,7 +2352,7 @@ void BinaryOpIC::State::GenerateAheadOfTime( #undef GENERATE #define GENERATE(op, left_kind, fixed_right_arg_value, result_kind, mode) \ do { \ - State state(op, mode); \ + State state(isolate, op, mode); \ state.left_kind_ = left_kind; \ state.fixed_right_arg_.has_value = true; \ state.fixed_right_arg_.value = fixed_right_arg_value; \ @@ -2380,25 +2526,28 @@ Type* BinaryOpIC::State::KindToType(Kind kind, Zone* zone) { } -MaybeObject* BinaryOpIC::Transition(Handle<AllocationSite> allocation_site, - Handle<Object> left, - Handle<Object> right) { - State state(target()->extra_ic_state()); +MaybeHandle<Object> BinaryOpIC::Transition( + Handle<AllocationSite> allocation_site, + Handle<Object> left, + Handle<Object> right) { + State state(isolate(), target()->extra_ic_state()); // Compute the actual result using the builtin for the binary operation. Object* builtin = isolate()->js_builtins_object()->javascript_builtin( TokenToJSBuiltin(state.op())); Handle<JSFunction> function = handle(JSFunction::cast(builtin), isolate()); - bool caught_exception; - Handle<Object> result = Execution::Call( - isolate(), function, left, 1, &right, &caught_exception); - if (caught_exception) return Failure::Exception(); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate(), + result, + Execution::Call(isolate(), function, left, 1, &right), + Object); // Execution::Call can execute arbitrary JavaScript, hence potentially // update the state of this very IC, so we must update the stored state. UpdateTarget(); // Compute the new state. - State old_state(target()->extra_ic_state()); + State old_state(isolate(), target()->extra_ic_state()); state.Update(left, right, result); // Check if we have a string operation here. @@ -2410,15 +2559,15 @@ MaybeObject* BinaryOpIC::Transition(Handle<AllocationSite> allocation_site, } // Install the stub with an allocation site. - BinaryOpICWithAllocationSiteStub stub(state); - target = stub.GetCodeCopyFromTemplate(isolate(), allocation_site); + BinaryOpICWithAllocationSiteStub stub(isolate(), state); + target = stub.GetCodeCopyFromTemplate(allocation_site); // Sanity check the trampoline stub. ASSERT_EQ(*allocation_site, target->FindFirstAllocationSite()); } else { // Install the generic stub. - BinaryOpICStub stub(state); - target = stub.GetCode(isolate()); + BinaryOpICStub stub(isolate(), state); + target = stub.GetCode(); // Sanity check the generic stub. 
ASSERT_EQ(NULL, target->FindFirstAllocationSite()); @@ -2450,21 +2599,26 @@ MaybeObject* BinaryOpIC::Transition(Handle<AllocationSite> allocation_site, PatchInlinedSmiCode(address(), DISABLE_INLINED_SMI_CHECK); } - return *result; + return result; } -RUNTIME_FUNCTION(MaybeObject*, BinaryOpIC_Miss) { +RUNTIME_FUNCTION(BinaryOpIC_Miss) { HandleScope scope(isolate); ASSERT_EQ(2, args.length()); Handle<Object> left = args.at<Object>(BinaryOpICStub::kLeft); Handle<Object> right = args.at<Object>(BinaryOpICStub::kRight); BinaryOpIC ic(isolate); - return ic.Transition(Handle<AllocationSite>::null(), left, right); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, + result, + ic.Transition(Handle<AllocationSite>::null(), left, right)); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, BinaryOpIC_MissWithAllocationSite) { +RUNTIME_FUNCTION(BinaryOpIC_MissWithAllocationSite) { HandleScope scope(isolate); ASSERT_EQ(3, args.length()); Handle<AllocationSite> allocation_site = args.at<AllocationSite>( @@ -2474,21 +2628,26 @@ RUNTIME_FUNCTION(MaybeObject*, BinaryOpIC_MissWithAllocationSite) { Handle<Object> right = args.at<Object>( BinaryOpWithAllocationSiteStub::kRight); BinaryOpIC ic(isolate); - return ic.Transition(allocation_site, left, right); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, + result, + ic.Transition(allocation_site, left, right)); + return *result; } Code* CompareIC::GetRawUninitialized(Isolate* isolate, Token::Value op) { - ICCompareStub stub(op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED); + ICCompareStub stub(isolate, op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED); Code* code = NULL; - CHECK(stub.FindCodeInCache(&code, isolate)); + CHECK(stub.FindCodeInCache(&code)); return code; } Handle<Code> CompareIC::GetUninitialized(Isolate* isolate, Token::Value op) { - ICCompareStub stub(op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED); - return stub.GetCode(isolate); + ICCompareStub stub(isolate, op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED); + return stub.GetCode(); } @@ -2660,12 +2819,12 @@ Code* CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) { State new_right = NewInputState(previous_right, y); State state = TargetState(previous_state, previous_left, previous_right, HasInlinedSmiCode(address()), x, y); - ICCompareStub stub(op_, new_left, new_right, state); + ICCompareStub stub(isolate(), op_, new_left, new_right, state); if (state == KNOWN_OBJECT) { stub.set_known_map( Handle<Map>(Handle<JSObject>::cast(x)->map(), isolate())); } - Handle<Code> new_target = stub.GetCode(isolate()); + Handle<Code> new_target = stub.GetCode(); set_target(*new_target); if (FLAG_trace_ic) { @@ -2679,7 +2838,7 @@ Code* CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) { GetStateName(new_right), GetStateName(state), Token::Name(op_), - static_cast<void*>(*stub.GetCode(isolate()))); + static_cast<void*>(*stub.GetCode())); } // Activate inlined smi code. @@ -2692,7 +2851,7 @@ Code* CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) { // Used from ICCompareStub::GenerateMiss in code-stubs-<arch>.cc. 
-RUNTIME_FUNCTION(Code*, CompareIC_Miss) { +RUNTIME_FUNCTION(CompareIC_Miss) { HandleScope scope(isolate); ASSERT(args.length() == 3); CompareIC ic(isolate, static_cast<Token::Value>(args.smi_at(2))); @@ -2706,29 +2865,32 @@ void CompareNilIC::Clear(Address address, if (IsCleared(target)) return; ExtraICState state = target->extra_ic_state(); - CompareNilICStub stub(state, HydrogenCodeStub::UNINITIALIZED); + CompareNilICStub stub(target->GetIsolate(), + state, + HydrogenCodeStub::UNINITIALIZED); stub.ClearState(); Code* code = NULL; - CHECK(stub.FindCodeInCache(&code, target->GetIsolate())); + CHECK(stub.FindCodeInCache(&code)); SetTargetAtAddress(address, code, constant_pool); } -MaybeObject* CompareNilIC::DoCompareNilSlow(NilValue nil, - Handle<Object> object) { +Handle<Object> CompareNilIC::DoCompareNilSlow(Isolate* isolate, + NilValue nil, + Handle<Object> object) { if (object->IsNull() || object->IsUndefined()) { - return Smi::FromInt(true); + return handle(Smi::FromInt(true), isolate); } - return Smi::FromInt(object->IsUndetectableObject()); + return handle(Smi::FromInt(object->IsUndetectableObject()), isolate); } -MaybeObject* CompareNilIC::CompareNil(Handle<Object> object) { +Handle<Object> CompareNilIC::CompareNil(Handle<Object> object) { ExtraICState extra_ic_state = target()->extra_ic_state(); - CompareNilICStub stub(extra_ic_state); + CompareNilICStub stub(isolate(), extra_ic_state); // Extract the current supported types from the patched IC and calculate what // types must be supported as a result of the miss. @@ -2741,27 +2903,27 @@ MaybeObject* CompareNilIC::CompareNil(Handle<Object> object) { // Find or create the specialized stub to support the new set of types. Handle<Code> code; if (stub.IsMonomorphic()) { - Handle<Map> monomorphic_map(already_monomorphic - ? target()->FindFirstMap() + Handle<Map> monomorphic_map(already_monomorphic && FirstTargetMap() != NULL + ? FirstTargetMap() : HeapObject::cast(*object)->map()); code = isolate()->stub_cache()->ComputeCompareNil(monomorphic_map, stub); } else { - code = stub.GetCode(isolate()); + code = stub.GetCode(); } set_target(*code); - return DoCompareNilSlow(nil, object); + return DoCompareNilSlow(isolate(), nil, object); } -RUNTIME_FUNCTION(MaybeObject*, CompareNilIC_Miss) { +RUNTIME_FUNCTION(CompareNilIC_Miss) { HandleScope scope(isolate); Handle<Object> object = args.at<Object>(0); CompareNilIC ic(isolate); - return ic.CompareNil(object); + return *ic.CompareNil(object); } -RUNTIME_FUNCTION(MaybeObject*, Unreachable) { +RUNTIME_FUNCTION(Unreachable) { UNREACHABLE(); CHECK(false); return isolate->heap()->undefined_value(); @@ -2809,21 +2971,21 @@ Builtins::JavaScript BinaryOpIC::TokenToJSBuiltin(Token::Value op) { } -MaybeObject* ToBooleanIC::ToBoolean(Handle<Object> object) { - ToBooleanStub stub(target()->extra_ic_state()); +Handle<Object> ToBooleanIC::ToBoolean(Handle<Object> object) { + ToBooleanStub stub(isolate(), target()->extra_ic_state()); bool to_boolean_value = stub.UpdateStatus(object); - Handle<Code> code = stub.GetCode(isolate()); + Handle<Code> code = stub.GetCode(); set_target(*code); - return Smi::FromInt(to_boolean_value ? 1 : 0); + return handle(Smi::FromInt(to_boolean_value ? 
1 : 0), isolate()); } -RUNTIME_FUNCTION(MaybeObject*, ToBooleanIC_Miss) { +RUNTIME_FUNCTION(ToBooleanIC_Miss) { ASSERT(args.length() == 1); HandleScope scope(isolate); Handle<Object> object = args.at<Object>(0); ToBooleanIC ic(isolate); - return ic.ToBoolean(object); + return *ic.ToBoolean(object); } diff --git a/deps/v8/src/ic.h b/deps/v8/src/ic.h index e70cb82c9..895c21e73 100644 --- a/deps/v8/src/ic.h +++ b/deps/v8/src/ic.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_IC_H_ #define V8_IC_H_ @@ -42,6 +19,7 @@ const int kMaxKeyedPolymorphism = 4; #define IC_UTIL_LIST(ICU) \ ICU(LoadIC_Miss) \ ICU(KeyedLoadIC_Miss) \ + ICU(CallIC_Miss) \ ICU(StoreIC_Miss) \ ICU(StoreIC_ArrayLength) \ ICU(StoreIC_Slow) \ @@ -96,10 +74,24 @@ class IC { // Compute the current IC state based on the target stub, receiver and name. void UpdateState(Handle<Object> receiver, Handle<Object> name); - void MarkMonomorphicPrototypeFailure() { - state_ = MONOMORPHIC_PROTOTYPE_FAILURE; + + bool IsNameCompatibleWithMonomorphicPrototypeFailure(Handle<Object> name); + bool TryMarkMonomorphicPrototypeFailure(Handle<Object> name) { + if (IsNameCompatibleWithMonomorphicPrototypeFailure(name)) { + state_ = MONOMORPHIC_PROTOTYPE_FAILURE; + return true; + } + return false; } + // If the stub contains weak maps then this function adds the stub to + // the dependent code array of each weak map. + static void RegisterWeakMapDependency(Handle<Code> stub); + + // This function is called when a weak map in the stub is dying, + // invalidates the stub by setting maps in it to undefined. + static void InvalidateMaps(Code* stub); + // Clear the inline cache to initial state. 
static void Clear(Isolate* isolate, Address address, @@ -113,6 +105,10 @@ class IC { bool IsStoreStub() const { return target()->is_store_stub() || target()->is_keyed_store_stub(); } + + bool IsCallStub() const { + return target()->is_call_stub(); + } #endif // Determines which map must be used for keeping the code stub. @@ -156,17 +152,18 @@ class IC { Address pc() const { return *pc_address_; } Isolate* isolate() const { return isolate_; } -#ifdef ENABLE_DEBUGGER_SUPPORT // Get the shared function info of the caller. SharedFunctionInfo* GetSharedFunctionInfo() const; // Get the code object of the caller. Code* GetCode() const; // Get the original (non-breakpointed) code object of the caller. Code* GetOriginalCode() const; -#endif // Set the call-site target. void set_target(Code* code) { +#ifdef VERIFY_HEAP + code->VerifyEmbeddedObjectsDependency(); +#endif SetTargetAtAddress(address(), code, constant_pool()); target_set_ = true; } @@ -179,10 +176,10 @@ class IC { void TraceIC(const char* type, Handle<Object> name); #endif - Failure* TypeError(const char* type, - Handle<Object> object, - Handle<Object> key); - Failure* ReferenceError(const char* type, Handle<String> name); + MaybeHandle<Object> TypeError(const char* type, + Handle<Object> object, + Handle<Object> key); + MaybeHandle<Object> ReferenceError(const char* type, Handle<String> name); // Access the target code for the given IC address. static inline Code* GetTargetAtAddress(Address address, @@ -247,6 +244,25 @@ class IC { extra_ic_state_ = state; } + void TargetMaps(MapHandleList* list) { + FindTargetMaps(); + for (int i = 0; i < target_maps_.length(); i++) { + list->Add(target_maps_.at(i)); + } + } + + void TargetTypes(TypeHandleList* list) { + FindTargetMaps(); + for (int i = 0; i < target_maps_.length(); i++) { + list->Add(IC::MapToType<HeapType>(target_maps_.at(i), isolate_)); + } + } + + Map* FirstTargetMap() { + FindTargetMaps(); + return target_maps_.length() > 0 ? *target_maps_.at(0) : NULL; + } + protected: void UpdateTarget() { target_ = handle(raw_target(), isolate_); @@ -259,6 +275,17 @@ class IC { inline ConstantPoolArray* constant_pool() const; inline ConstantPoolArray* raw_constant_pool() const; + void FindTargetMaps() { + if (target_maps_set_) return; + target_maps_set_ = true; + if (state_ == MONOMORPHIC) { + Map* map = target_->FindFirstMap(); + if (map != NULL) target_maps_.Add(handle(map)); + } else if (state_ != UNINITIALIZED && state_ != PREMONOMORPHIC) { + target_->FindAllMaps(&target_maps_); + } + } + // Frame pointer for the frame that uses (calls) the IC. 
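The new FindTargetMaps()/TargetMaps()/FirstTargetMap() helpers above replace repeated target()->FindAllMaps()/FindFirstMap() calls with a lazily filled, per-IC cache. Reduced to a standalone class for illustration (ints stand in for Map handles, and ExpensiveLookup() for scanning the target code object):

#include <iostream>
#include <vector>

class TargetMapCache {
 public:
  TargetMapCache() : computed_(false) {}

  // Copy the cached maps into |out|; the underlying scan runs at most once.
  void TargetMaps(std::vector<int>* out) {
    Find();
    *out = maps_;
  }

  int FirstTargetMap() {
    Find();
    return maps_.empty() ? -1 : maps_.front();  // -1 plays the role of NULL
  }

 private:
  void Find() {
    if (computed_) return;  // memoized
    computed_ = true;
    maps_ = ExpensiveLookup();
  }

  static std::vector<int> ExpensiveLookup() {
    std::cout << "scanning target code once\n";
    return std::vector<int>(1, 42);
  }

  bool computed_;
  std::vector<int> maps_;
};

int main() {
  TargetMapCache ic;
  std::vector<int> maps;
  ic.TargetMaps(&maps);                      // triggers the single scan
  std::cout << ic.FirstTargetMap() << "\n";  // served from the cache
  return 0;
}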
Address fp_; @@ -280,6 +307,8 @@ class IC { bool target_set_; ExtraICState extra_ic_state_; + MapHandleList target_maps_; + bool target_maps_set_; DISALLOW_IMPLICIT_CONSTRUCTORS(IC); }; @@ -301,6 +330,78 @@ class IC_Utility { }; +class CallIC: public IC { + public: + enum CallType { METHOD, FUNCTION }; + + class State V8_FINAL BASE_EMBEDDED { + public: + explicit State(ExtraICState extra_ic_state); + + static State DefaultCallState(int argc, CallType call_type) { + return State(argc, call_type); + } + + static State MegamorphicCallState(int argc, CallType call_type) { + return State(argc, call_type); + } + + InlineCacheState GetICState() const { return ::v8::internal::GENERIC; } + + ExtraICState GetExtraICState() const; + + static void GenerateAheadOfTime( + Isolate*, void (*Generate)(Isolate*, const State&)); + + int arg_count() const { return argc_; } + CallType call_type() const { return call_type_; } + + bool CallAsMethod() const { return call_type_ == METHOD; } + + void Print(StringStream* stream) const; + + bool operator==(const State& other_state) const { + return (argc_ == other_state.argc_ && + call_type_ == other_state.call_type_); + } + + bool operator!=(const State& other_state) const { + return !(*this == other_state); + } + + private: + State(int argc, + CallType call_type) + : argc_(argc), + call_type_(call_type) { + } + + class ArgcBits: public BitField<int, 0, Code::kArgumentsBits> {}; + class CallTypeBits: public BitField<CallType, Code::kArgumentsBits, 1> {}; + + const int argc_; + const CallType call_type_; + }; + + explicit CallIC(Isolate* isolate) + : IC(EXTRA_CALL_FRAME, isolate) { + } + + void HandleMiss(Handle<Object> receiver, + Handle<Object> function, + Handle<FixedArray> vector, + Handle<Smi> slot); + + // Code generator routines. + static Handle<Code> initialize_stub(Isolate* isolate, + int argc, + CallType call_type); + + static void Clear(Isolate* isolate, Address address, Code* target, + ConstantPoolArray* constant_pool); +}; + + class LoadIC: public IC { public: // ExtraICState bits @@ -348,8 +449,8 @@ class LoadIC: public IC { static Handle<Code> initialize_stub(Isolate* isolate, ExtraICState extra_state); - MUST_USE_RESULT MaybeObject* Load(Handle<Object> object, - Handle<String> name); + MUST_USE_RESULT MaybeHandle<Object> Load(Handle<Object> object, + Handle<String> name); protected: virtual Code::Kind kind() const { return Code::LOAD_IC; } @@ -410,8 +511,8 @@ class KeyedLoadIC: public LoadIC { ASSERT(target()->is_keyed_load_stub()); } - MUST_USE_RESULT MaybeObject* Load(Handle<Object> object, - Handle<Object> key); + MUST_USE_RESULT MaybeHandle<Object> Load(Handle<Object> object, + Handle<Object> key); // Code generator routines. static void GenerateMiss(MacroAssembler* masm); @@ -515,7 +616,7 @@ class StoreIC: public IC { static Handle<Code> initialize_stub(Isolate* isolate, StrictMode strict_mode); - MUST_USE_RESULT MaybeObject* Store( + MUST_USE_RESULT MaybeHandle<Object> Store( Handle<Object> object, Handle<String> name, Handle<Object> value, @@ -604,9 +705,9 @@ class KeyedStoreIC: public StoreIC { ASSERT(target()->is_keyed_store_stub()); } - MUST_USE_RESULT MaybeObject* Store(Handle<Object> object, - Handle<Object> name, - Handle<Object> value); + MUST_USE_RESULT MaybeHandle<Object> Store(Handle<Object> object, + Handle<Object> name, + Handle<Object> value); // Code generators for stub routines. Only called once at startup. 
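The ArgcBits/CallTypeBits declarations above pack CallIC's argument count and call type into one ExtraICState word via BitField. A self-contained sketch of that encode/decode scheme; the BitField here is a simplified stand-in, and the 16-bit width used for Code::kArgumentsBits is only an assumption for the example:

#include <cstdint>
#include <iostream>

// Simplified stand-in for V8's BitField template.
template <class T, int shift, int size>
struct BitField {
  static const uint32_t kMask = ((1u << size) - 1u) << shift;
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << shift;
  }
  static T decode(uint32_t state) {
    return static_cast<T>((state & kMask) >> shift);
  }
};

enum CallType { METHOD, FUNCTION };

// Assumed width for this sketch; the patch uses Code::kArgumentsBits.
static const int kArgumentsBits = 16;

typedef BitField<int, 0, kArgumentsBits> ArgcBits;
typedef BitField<CallType, kArgumentsBits, 1> CallTypeBits;

int main() {
  // Pack argc=3 and METHOD into one ExtraICState-style word, then unpack.
  uint32_t state = ArgcBits::encode(3) | CallTypeBits::encode(METHOD);
  std::cout << "argc=" << ArgcBits::decode(state)
            << " method=" << (CallTypeBits::decode(state) == METHOD) << "\n";
  return 0;
}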
static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } @@ -679,7 +780,7 @@ class KeyedStoreIC: public StoreIC { Handle<Object> key, Handle<Object> value); - Handle<Map> ComputeTransitionedMap(Handle<JSObject> receiver, + Handle<Map> ComputeTransitionedMap(Handle<Map> map, KeyedAccessStoreMode store_mode); friend class IC; @@ -694,11 +795,11 @@ class BinaryOpIC: public IC { public: class State V8_FINAL BASE_EMBEDDED { public: - explicit State(ExtraICState extra_ic_state); + State(Isolate* isolate, ExtraICState extra_ic_state); - State(Token::Value op, OverwriteMode mode) + State(Isolate* isolate, Token::Value op, OverwriteMode mode) : op_(op), mode_(mode), left_kind_(NONE), right_kind_(NONE), - result_kind_(NONE) { + result_kind_(NONE), isolate_(isolate) { ASSERT_LE(FIRST_TOKEN, op); ASSERT_LE(op, LAST_TOKEN); } @@ -775,6 +876,8 @@ class BinaryOpIC: public IC { Handle<Object> right, Handle<Object> result); + Isolate* isolate() const { return isolate_; } + private: enum Kind { NONE, SMI, INT32, NUMBER, STRING, GENERIC }; @@ -805,15 +908,16 @@ class BinaryOpIC: public IC { Kind right_kind_; Kind result_kind_; Maybe<int> fixed_right_arg_; + Isolate* isolate_; }; explicit BinaryOpIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) { } static Builtins::JavaScript TokenToJSBuiltin(Token::Value op); - MaybeObject* Transition(Handle<AllocationSite> allocation_site, - Handle<Object> left, - Handle<Object> right) V8_WARN_UNUSED_RESULT; + MaybeHandle<Object> Transition(Handle<AllocationSite> allocation_site, + Handle<Object> left, + Handle<Object> right) V8_WARN_UNUSED_RESULT; }; @@ -895,7 +999,7 @@ class CompareNilIC: public IC { public: explicit CompareNilIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) {} - MUST_USE_RESULT MaybeObject* CompareNil(Handle<Object> object); + Handle<Object> CompareNil(Handle<Object> object); static Handle<Code> GetUninitialized(); @@ -903,8 +1007,8 @@ class CompareNilIC: public IC { Code* target, ConstantPoolArray* constant_pool); - static MUST_USE_RESULT MaybeObject* DoCompareNilSlow(NilValue nil, - Handle<Object> object); + static Handle<Object> DoCompareNilSlow(Isolate* isolate, NilValue nil, + Handle<Object> object); }; @@ -912,7 +1016,7 @@ class ToBooleanIC: public IC { public: explicit ToBooleanIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) { } - MaybeObject* ToBoolean(Handle<Object> object); + Handle<Object> ToBoolean(Handle<Object> object); }; @@ -920,15 +1024,15 @@ class ToBooleanIC: public IC { enum InlinedSmiCheck { ENABLE_INLINED_SMI_CHECK, DISABLE_INLINED_SMI_CHECK }; void PatchInlinedSmiCode(Address address, InlinedSmiCheck check); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissFromStubFailure); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, UnaryOpIC_Miss); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreIC_MissFromStubFailure); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, ElementsTransitionAndStoreIC_Miss); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, BinaryOpIC_Miss); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, BinaryOpIC_MissWithAllocationSite); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, CompareNilIC_Miss); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, ToBooleanIC_Miss); +DECLARE_RUNTIME_FUNCTION(KeyedLoadIC_MissFromStubFailure); +DECLARE_RUNTIME_FUNCTION(KeyedStoreIC_MissFromStubFailure); +DECLARE_RUNTIME_FUNCTION(UnaryOpIC_Miss); +DECLARE_RUNTIME_FUNCTION(StoreIC_MissFromStubFailure); +DECLARE_RUNTIME_FUNCTION(ElementsTransitionAndStoreIC_Miss); 
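The ic.h hunks above move the IC entry points off the old MaybeObject*/Failure* plumbing and onto MaybeHandle<Object> return values (Load, Store, TypeError, ReferenceError, and the runtime-miss declarations). As a rough caller-side sketch only — the helper name below is invented for illustration and is not part of this patch — the new convention reads:

// Illustrative only: LoadOrDefault is a made-up helper showing the
// MaybeHandle<Object> calling convention used by the new LoadIC::Load.
MaybeHandle<Object> LoadOrDefault(LoadIC* ic, Handle<Object> object,
                                  Handle<String> name,
                                  Handle<Object> fallback) {
  Handle<Object> result;
  if (!ic->Load(object, name).ToHandle(&result)) {
    // An empty MaybeHandle signals a pending exception; propagate it.
    return MaybeHandle<Object>();
  }
  return result->IsUndefined() ? fallback : result;
}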
+DECLARE_RUNTIME_FUNCTION(BinaryOpIC_Miss); +DECLARE_RUNTIME_FUNCTION(BinaryOpIC_MissWithAllocationSite); +DECLARE_RUNTIME_FUNCTION(CompareNilIC_Miss); +DECLARE_RUNTIME_FUNCTION(ToBooleanIC_Miss); } } // namespace v8::internal diff --git a/deps/v8/src/icu_util.cc b/deps/v8/src/icu_util.cc index 1fff8170f..b036ef4ee 100644 --- a/deps/v8/src/icu_util.cc +++ b/deps/v8/src/icu_util.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "icu_util.h" diff --git a/deps/v8/src/icu_util.h b/deps/v8/src/icu_util.h index 6b50c185c..cd98ff0df 100644 --- a/deps/v8/src/icu_util.h +++ b/deps/v8/src/icu_util.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ICU_UTIL_H_ diff --git a/deps/v8/src/incremental-marking-inl.h b/deps/v8/src/incremental-marking-inl.h index 1c30383d5..19d471c36 100644 --- a/deps/v8/src/incremental-marking-inl.h +++ b/deps/v8/src/incremental-marking-inl.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_INCREMENTAL_MARKING_INL_H_ #define V8_INCREMENTAL_MARKING_INL_H_ @@ -68,7 +45,7 @@ bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, void IncrementalMarking::RecordWrite(HeapObject* obj, Object** slot, Object* value) { - if (IsMarking() && value->NonFailureIsHeapObject()) { + if (IsMarking() && value->IsHeapObject()) { RecordWriteSlow(obj, slot, value); } } @@ -84,7 +61,7 @@ void IncrementalMarking::RecordWriteOfCodeEntry(JSFunction* host, void IncrementalMarking::RecordWriteIntoCode(HeapObject* obj, RelocInfo* rinfo, Object* value) { - if (IsMarking() && value->NonFailureIsHeapObject()) { + if (IsMarking() && value->IsHeapObject()) { RecordWriteIntoCodeSlow(obj, rinfo, value); } } diff --git a/deps/v8/src/incremental-marking.cc b/deps/v8/src/incremental-marking.cc index bbe0c51a5..2b6765c72 100644 --- a/deps/v8/src/incremental-marking.cc +++ b/deps/v8/src/incremental-marking.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. 
All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -31,9 +8,9 @@ #include "code-stubs.h" #include "compilation-cache.h" +#include "conversions.h" #include "objects-visiting.h" #include "objects-visiting-inl.h" -#include "v8conversions.h" namespace v8 { namespace internal { @@ -263,7 +240,7 @@ class IncrementalMarkingMarkingVisitor INLINE(static void VisitPointer(Heap* heap, Object** p)) { Object* obj = *p; - if (obj->NonFailureIsHeapObject()) { + if (obj->IsHeapObject()) { heap->mark_compact_collector()->RecordSlot(p, p, obj); MarkObject(heap, obj); } @@ -272,7 +249,7 @@ class IncrementalMarkingMarkingVisitor INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { for (Object** p = start; p < end; p++) { Object* obj = *p; - if (obj->NonFailureIsHeapObject()) { + if (obj->IsHeapObject()) { heap->mark_compact_collector()->RecordSlot(start, p, obj); MarkObject(heap, obj); } @@ -285,7 +262,7 @@ class IncrementalMarkingMarkingVisitor Object** end)) { for (Object** p = start; p < end; p++) { Object* obj = *p; - if (obj->NonFailureIsHeapObject()) { + if (obj->IsHeapObject()) { heap->mark_compact_collector()->RecordSlot(anchor, p, obj); MarkObject(heap, obj); } @@ -482,7 +459,7 @@ bool IncrementalMarking::WorthActivating() { return FLAG_incremental_marking && FLAG_incremental_marking_steps && heap_->gc_state() == Heap::NOT_IN_GC && - !Serializer::enabled() && + !Serializer::enabled(heap_->isolate()) && heap_->isolate()->IsInitialized() && heap_->PromotedSpaceSizeOfObjects() > kActivationThreshold; } @@ -560,12 +537,12 @@ void IncrementalMarking::Start(CompactionFlag flag) { ASSERT(FLAG_incremental_marking_steps); ASSERT(state_ == STOPPED); ASSERT(heap_->gc_state() == Heap::NOT_IN_GC); - ASSERT(!Serializer::enabled()); + ASSERT(!Serializer::enabled(heap_->isolate())); ASSERT(heap_->isolate()->IsInitialized()); ResetStepCounters(); - if (heap_->IsSweepingComplete()) { + if 
(!heap_->mark_compact_collector()->IsConcurrentSweepingInProgress()) { StartMarking(flag); } else { if (FLAG_trace_incremental_marking) { @@ -909,7 +886,11 @@ void IncrementalMarking::Step(intptr_t allocated_bytes, } if (state_ == SWEEPING) { - if (heap_->EnsureSweepersProgressed(static_cast<int>(bytes_to_process))) { + if (heap_->mark_compact_collector()->IsConcurrentSweepingInProgress() && + heap_->mark_compact_collector()->IsSweepingCompleted()) { + heap_->mark_compact_collector()->WaitUntilSweepingCompleted(); + } + if (!heap_->mark_compact_collector()->IsConcurrentSweepingInProgress()) { bytes_scanned_ = 0; StartMarking(PREVENT_COMPACTION); } diff --git a/deps/v8/src/incremental-marking.h b/deps/v8/src/incremental-marking.h index f4362ff5d..2ea30b17c 100644 --- a/deps/v8/src/incremental-marking.h +++ b/deps/v8/src/incremental-marking.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_INCREMENTAL_MARKING_H_ #define V8_INCREMENTAL_MARKING_H_ diff --git a/deps/v8/src/interface.cc b/deps/v8/src/interface.cc index c39d50e35..bd50c61ea 100644 --- a/deps/v8/src/interface.cc +++ b/deps/v8/src/interface.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/interface.h b/deps/v8/src/interface.h index f824a9a87..31a9fa0c4 100644 --- a/deps/v8/src/interface.h +++ b/deps/v8/src/interface.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_INTERFACE_H_ #define V8_INTERFACE_H_ diff --git a/deps/v8/src/interpreter-irregexp.cc b/deps/v8/src/interpreter-irregexp.cc index de54d0c42..4c7c04c13 100644 --- a/deps/v8/src/interpreter-irregexp.cc +++ b/deps/v8/src/interpreter-irregexp.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // A simple interpreter for the Irregexp byte code. diff --git a/deps/v8/src/interpreter-irregexp.h b/deps/v8/src/interpreter-irregexp.h index 0f45d9820..4953a601e 100644 --- a/deps/v8/src/interpreter-irregexp.h +++ b/deps/v8/src/interpreter-irregexp.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // A simple interpreter for the Irregexp byte code. diff --git a/deps/v8/src/isolate-inl.h b/deps/v8/src/isolate-inl.h index 764bcb8bf..eebdcee9b 100644 --- a/deps/v8/src/isolate-inl.h +++ b/deps/v8/src/isolate-inl.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ISOLATE_INL_H_ #define V8_ISOLATE_INL_H_ @@ -54,21 +31,12 @@ bool Isolate::IsCodePreAgingActive() { bool Isolate::IsDebuggerActive() { -#ifdef ENABLE_DEBUGGER_SUPPORT - if (!NoBarrier_Load(&debugger_initialized_)) return false; return debugger()->IsDebuggerActive(); -#else - return false; -#endif } bool Isolate::DebuggerHasBreakPoints() { -#ifdef ENABLE_DEBUGGER_SUPPORT return debug()->has_break_points(); -#else - return false; -#endif } diff --git a/deps/v8/src/isolate.cc b/deps/v8/src/isolate.cc index 7e06a2ed5..d93639b5d 100644 --- a/deps/v8/src/isolate.cc +++ b/deps/v8/src/isolate.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdlib.h> @@ -156,8 +133,8 @@ Isolate::PerIsolateThreadData* per_thread = new PerIsolateThreadData(this, thread_id); thread_data_table_->Insert(per_thread); } + ASSERT(thread_data_table_->Lookup(this, thread_id) == per_thread); } - ASSERT(thread_data_table_->Lookup(this, thread_id) == per_thread); return per_thread; } @@ -212,37 +189,6 @@ struct StaticInitializer { } } static_initializer; -#ifdef ENABLE_DEBUGGER_SUPPORT -Debugger* Isolate::GetDefaultIsolateDebugger() { - EnsureDefaultIsolate(); - return default_isolate_->debugger(); -} -#endif - - -StackGuard* Isolate::GetDefaultIsolateStackGuard() { - EnsureDefaultIsolate(); - return default_isolate_->stack_guard(); -} - - -void Isolate::EnterDefaultIsolate() { - EnsureDefaultIsolate(); - ASSERT(default_isolate_ != NULL); - - PerIsolateThreadData* data = CurrentPerIsolateThreadData(); - // If not yet in default isolate - enter it. - if (data == NULL || data->isolate() != default_isolate_) { - default_isolate_->Enter(); - } -} - - -v8::Isolate* Isolate::GetDefaultIsolateForLocking() { - EnsureDefaultIsolate(); - return reinterpret_cast<v8::Isolate*>(default_isolate_); -} - Address Isolate::get_address_from_id(Isolate::AddressId id) { return isolate_addresses_[id]; @@ -264,21 +210,11 @@ void Isolate::IterateThread(ThreadVisitor* v, char* t) { void Isolate::Iterate(ObjectVisitor* v, ThreadLocalTop* thread) { // Visit the roots from the top for a given thread. - Object* pending; - // The pending exception can sometimes be a failure. We can't show - // that to the GC, which only understands objects. - if (thread->pending_exception_->ToObject(&pending)) { - v->VisitPointer(&pending); - thread->pending_exception_ = pending; // In case GC updated it. 
- } + v->VisitPointer(&thread->pending_exception_); v->VisitPointer(&(thread->pending_message_obj_)); v->VisitPointer(BitCast<Object**>(&(thread->pending_message_script_))); v->VisitPointer(BitCast<Object**>(&(thread->context_))); - Object* scheduled; - if (thread->scheduled_exception_->ToObject(&scheduled)) { - v->VisitPointer(&scheduled); - thread->scheduled_exception_ = scheduled; - } + v->VisitPointer(&thread->scheduled_exception_); for (v8::TryCatch* block = thread->TryCatchHandler(); block != NULL; @@ -560,7 +496,7 @@ Handle<JSArray> Isolate::CaptureCurrentStackTrace( if (options & StackTrace::kLineNumber) { int script_line_offset = script->line_offset()->value(); int position = frames[i].code()->SourcePosition(frames[i].pc()); - int line_number = GetScriptLineNumber(script, position); + int line_number = Script::GetLineNumber(script, position); // line_number is already shifted by the script_line_offset. int relative_line_number = line_number - script_line_offset; if (options & StackTrace::kColumnOffset && relative_line_number >= 0) { @@ -573,41 +509,31 @@ Handle<JSArray> Isolate::CaptureCurrentStackTrace( // tag. column_offset += script->column_offset()->value(); } - CHECK_NOT_EMPTY_HANDLE( - this, - JSObject::SetLocalPropertyIgnoreAttributes( - stack_frame, column_key, - Handle<Smi>(Smi::FromInt(column_offset + 1), this), NONE)); + JSObject::SetLocalPropertyIgnoreAttributes( + stack_frame, column_key, + Handle<Smi>(Smi::FromInt(column_offset + 1), this), NONE).Check(); } - CHECK_NOT_EMPTY_HANDLE( - this, - JSObject::SetLocalPropertyIgnoreAttributes( - stack_frame, line_key, - Handle<Smi>(Smi::FromInt(line_number + 1), this), NONE)); + JSObject::SetLocalPropertyIgnoreAttributes( + stack_frame, line_key, + Handle<Smi>(Smi::FromInt(line_number + 1), this), NONE).Check(); } if (options & StackTrace::kScriptId) { Handle<Smi> script_id(script->id(), this); - CHECK_NOT_EMPTY_HANDLE(this, - JSObject::SetLocalPropertyIgnoreAttributes( - stack_frame, script_id_key, script_id, - NONE)); + JSObject::SetLocalPropertyIgnoreAttributes( + stack_frame, script_id_key, script_id, NONE).Check(); } if (options & StackTrace::kScriptName) { Handle<Object> script_name(script->name(), this); - CHECK_NOT_EMPTY_HANDLE(this, - JSObject::SetLocalPropertyIgnoreAttributes( - stack_frame, script_name_key, script_name, - NONE)); + JSObject::SetLocalPropertyIgnoreAttributes( + stack_frame, script_name_key, script_name, NONE).Check(); } if (options & StackTrace::kScriptNameOrSourceURL) { - Handle<Object> result = GetScriptNameOrSourceURL(script); - CHECK_NOT_EMPTY_HANDLE(this, - JSObject::SetLocalPropertyIgnoreAttributes( - stack_frame, script_name_or_source_url_key, - result, NONE)); + Handle<Object> result = Script::GetNameOrSourceURL(script); + JSObject::SetLocalPropertyIgnoreAttributes( + stack_frame, script_name_or_source_url_key, result, NONE).Check(); } if (options & StackTrace::kFunctionName) { @@ -615,27 +541,23 @@ Handle<JSArray> Isolate::CaptureCurrentStackTrace( if (!fun_name->BooleanValue()) { fun_name = Handle<Object>(fun->shared()->inferred_name(), this); } - CHECK_NOT_EMPTY_HANDLE(this, - JSObject::SetLocalPropertyIgnoreAttributes( - stack_frame, function_key, fun_name, NONE)); + JSObject::SetLocalPropertyIgnoreAttributes( + stack_frame, function_key, fun_name, NONE).Check(); } if (options & StackTrace::kIsEval) { Handle<Object> is_eval = script->compilation_type() == Script::COMPILATION_TYPE_EVAL ? 
factory()->true_value() : factory()->false_value(); - CHECK_NOT_EMPTY_HANDLE(this, - JSObject::SetLocalPropertyIgnoreAttributes( - stack_frame, eval_key, is_eval, NONE)); + JSObject::SetLocalPropertyIgnoreAttributes( + stack_frame, eval_key, is_eval, NONE).Check(); } if (options & StackTrace::kIsConstructor) { Handle<Object> is_constructor = (frames[i].is_constructor()) ? factory()->true_value() : factory()->false_value(); - CHECK_NOT_EMPTY_HANDLE(this, - JSObject::SetLocalPropertyIgnoreAttributes( - stack_frame, constructor_key, - is_constructor, NONE)); + JSObject::SetLocalPropertyIgnoreAttributes( + stack_frame, constructor_key, is_constructor, NONE).Check(); } FixedArray::cast(stack_trace->elements())->set(frames_seen, *stack_frame); @@ -717,28 +639,41 @@ void Isolate::SetFailedAccessCheckCallback( } -void Isolate::ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type) { +static inline AccessCheckInfo* GetAccessCheckInfo(Isolate* isolate, + Handle<JSObject> receiver) { + JSFunction* constructor = JSFunction::cast(receiver->map()->constructor()); + if (!constructor->shared()->IsApiFunction()) return NULL; + + Object* data_obj = + constructor->shared()->get_api_func_data()->access_check_info(); + if (data_obj == isolate->heap()->undefined_value()) return NULL; + + return AccessCheckInfo::cast(data_obj); +} + + +void Isolate::ReportFailedAccessCheck(Handle<JSObject> receiver, + v8::AccessType type) { if (!thread_local_top()->failed_access_check_callback_) return; ASSERT(receiver->IsAccessCheckNeeded()); ASSERT(context()); // Get the data object from access check info. - JSFunction* constructor = JSFunction::cast(receiver->map()->constructor()); - if (!constructor->shared()->IsApiFunction()) return; - Object* data_obj = - constructor->shared()->get_api_func_data()->access_check_info(); - if (data_obj == heap_.undefined_value()) return; - HandleScope scope(this); - Handle<JSObject> receiver_handle(receiver); - Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this); - { VMState<EXTERNAL> state(this); - thread_local_top()->failed_access_check_callback_( - v8::Utils::ToLocal(receiver_handle), + Handle<Object> data; + { DisallowHeapAllocation no_gc; + AccessCheckInfo* access_check_info = GetAccessCheckInfo(this, receiver); + if (!access_check_info) return; + data = handle(access_check_info->data(), this); + } + + // Leaving JavaScript. + VMState<EXTERNAL> state(this); + thread_local_top()->failed_access_check_callback_( + v8::Utils::ToLocal(receiver), type, v8::Utils::ToLocal(data)); - } } @@ -748,13 +683,14 @@ enum MayAccessDecision { static MayAccessDecision MayAccessPreCheck(Isolate* isolate, - JSObject* receiver, + Handle<JSObject> receiver, v8::AccessType type) { + DisallowHeapAllocation no_gc; // During bootstrapping, callback functions are not enabled yet. if (isolate->bootstrapper()->IsActive()) return YES; if (receiver->IsJSGlobalProxy()) { - Object* receiver_context = JSGlobalProxy::cast(receiver)->native_context(); + Object* receiver_context = JSGlobalProxy::cast(*receiver)->native_context(); if (!receiver_context->IsContext()) return NO; // Get the native context of current top context. @@ -772,16 +708,14 @@ static MayAccessDecision MayAccessPreCheck(Isolate* isolate, } -bool Isolate::MayNamedAccess(JSObject* receiver, Object* key, +bool Isolate::MayNamedAccess(Handle<JSObject> receiver, + Handle<Object> key, v8::AccessType type) { ASSERT(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded()); - // The callers of this method are not expecting a GC. 
- DisallowHeapAllocation no_gc; - // Skip checks for hidden properties access. Note, we do not // require existence of a context in this case. - if (key == heap_.hidden_string()) return true; + if (key.is_identical_to(factory()->hidden_string())) return true; // Check for compatibility between the security tokens in the // current lexical context and the accessed object. @@ -790,39 +724,30 @@ bool Isolate::MayNamedAccess(JSObject* receiver, Object* key, MayAccessDecision decision = MayAccessPreCheck(this, receiver, type); if (decision != UNKNOWN) return decision == YES; - // Get named access check callback - JSFunction* constructor = JSFunction::cast(receiver->map()->constructor()); - if (!constructor->shared()->IsApiFunction()) return false; - - Object* data_obj = - constructor->shared()->get_api_func_data()->access_check_info(); - if (data_obj == heap_.undefined_value()) return false; - - Object* fun_obj = AccessCheckInfo::cast(data_obj)->named_callback(); - v8::NamedSecurityCallback callback = - v8::ToCData<v8::NamedSecurityCallback>(fun_obj); - - if (!callback) return false; - HandleScope scope(this); - Handle<JSObject> receiver_handle(receiver, this); - Handle<Object> key_handle(key, this); - Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this); - LOG(this, ApiNamedSecurityCheck(key)); - bool result = false; - { - // Leaving JavaScript. - VMState<EXTERNAL> state(this); - result = callback(v8::Utils::ToLocal(receiver_handle), - v8::Utils::ToLocal(key_handle), - type, - v8::Utils::ToLocal(data)); + Handle<Object> data; + v8::NamedSecurityCallback callback; + { DisallowHeapAllocation no_gc; + AccessCheckInfo* access_check_info = GetAccessCheckInfo(this, receiver); + if (!access_check_info) return false; + Object* fun_obj = access_check_info->named_callback(); + callback = v8::ToCData<v8::NamedSecurityCallback>(fun_obj); + if (!callback) return false; + data = handle(access_check_info->data(), this); } - return result; + + LOG(this, ApiNamedSecurityCheck(*key)); + + // Leaving JavaScript. + VMState<EXTERNAL> state(this); + return callback(v8::Utils::ToLocal(receiver), + v8::Utils::ToLocal(key), + type, + v8::Utils::ToLocal(data)); } -bool Isolate::MayIndexedAccess(JSObject* receiver, +bool Isolate::MayIndexedAccess(Handle<JSObject> receiver, uint32_t index, v8::AccessType type) { ASSERT(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded()); @@ -833,34 +758,25 @@ bool Isolate::MayIndexedAccess(JSObject* receiver, MayAccessDecision decision = MayAccessPreCheck(this, receiver, type); if (decision != UNKNOWN) return decision == YES; - // Get indexed access check callback - JSFunction* constructor = JSFunction::cast(receiver->map()->constructor()); - if (!constructor->shared()->IsApiFunction()) return false; - - Object* data_obj = - constructor->shared()->get_api_func_data()->access_check_info(); - if (data_obj == heap_.undefined_value()) return false; - - Object* fun_obj = AccessCheckInfo::cast(data_obj)->indexed_callback(); - v8::IndexedSecurityCallback callback = - v8::ToCData<v8::IndexedSecurityCallback>(fun_obj); - - if (!callback) return false; - HandleScope scope(this); - Handle<JSObject> receiver_handle(receiver, this); - Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this); - LOG(this, ApiIndexedSecurityCheck(index)); - bool result = false; - { - // Leaving JavaScript. 
- VMState<EXTERNAL> state(this); - result = callback(v8::Utils::ToLocal(receiver_handle), - index, - type, - v8::Utils::ToLocal(data)); + Handle<Object> data; + v8::IndexedSecurityCallback callback; + { DisallowHeapAllocation no_gc; + // Get named access check callback + AccessCheckInfo* access_check_info = GetAccessCheckInfo(this, receiver); + if (!access_check_info) return false; + Object* fun_obj = access_check_info->indexed_callback(); + callback = v8::ToCData<v8::IndexedSecurityCallback>(fun_obj); + if (!callback) return false; + data = handle(access_check_info->data(), this); } - return result; + + LOG(this, ApiIndexedSecurityCheck(index)); + + // Leaving JavaScript. + VMState<EXTERNAL> state(this); + return callback( + v8::Utils::ToLocal(receiver), index, type, v8::Utils::ToLocal(data)); } @@ -868,23 +784,29 @@ const char* const Isolate::kStackOverflowMessage = "Uncaught RangeError: Maximum call stack size exceeded"; -Failure* Isolate::StackOverflow() { +Object* Isolate::StackOverflow() { HandleScope scope(this); // At this point we cannot create an Error object using its javascript // constructor. Instead, we copy the pre-constructed boilerplate and // attach the stack trace as a hidden property. Handle<String> key = factory()->stack_overflow_string(); - Handle<JSObject> boilerplate = - Handle<JSObject>::cast(GetProperty(this, js_builtins_object(), key)); - Handle<JSObject> exception = JSObject::Copy(boilerplate); + Handle<JSObject> boilerplate = Handle<JSObject>::cast( + Object::GetProperty(js_builtins_object(), key).ToHandleChecked()); + Handle<JSObject> exception = factory()->CopyJSObject(boilerplate); DoThrow(*exception, NULL); // Get stack trace limit. - Handle<Object> error = GetProperty(js_builtins_object(), "$Error"); - if (!error->IsJSObject()) return Failure::Exception(); + Handle<Object> error = Object::GetProperty( + this, js_builtins_object(), "$Error").ToHandleChecked(); + if (!error->IsJSObject()) return heap()->exception(); + + Handle<String> stackTraceLimit = + factory()->InternalizeUtf8String("stackTraceLimit"); + ASSERT(!stackTraceLimit.is_null()); Handle<Object> stack_trace_limit = - GetProperty(Handle<JSObject>::cast(error), "stackTraceLimit"); - if (!stack_trace_limit->IsNumber()) return Failure::Exception(); + JSObject::GetDataProperty(Handle<JSObject>::cast(error), + stackTraceLimit); + if (!stack_trace_limit->IsNumber()) return heap()->exception(); double dlimit = stack_trace_limit->Number(); int limit = std::isnan(dlimit) ? 
0 : static_cast<int>(dlimit); @@ -893,13 +815,13 @@ Failure* Isolate::StackOverflow() { JSObject::SetHiddenProperty(exception, factory()->hidden_stack_trace_string(), stack_trace); - return Failure::Exception(); + return heap()->exception(); } -Failure* Isolate::TerminateExecution() { +Object* Isolate::TerminateExecution() { DoThrow(heap_.termination_exception(), NULL); - return Failure::Exception(); + return heap()->exception(); } @@ -920,13 +842,13 @@ void Isolate::CancelTerminateExecution() { } -Failure* Isolate::Throw(Object* exception, MessageLocation* location) { +Object* Isolate::Throw(Object* exception, MessageLocation* location) { DoThrow(exception, location); - return Failure::Exception(); + return heap()->exception(); } -Failure* Isolate::ReThrow(MaybeObject* exception) { +Object* Isolate::ReThrow(Object* exception) { bool can_be_caught_externally = false; bool catchable_by_javascript = is_catchable_by_javascript(exception); ShouldReportException(&can_be_caught_externally, catchable_by_javascript); @@ -936,18 +858,17 @@ Failure* Isolate::ReThrow(MaybeObject* exception) { // Set the exception being re-thrown. set_pending_exception(exception); - if (exception->IsFailure()) return exception->ToFailureUnchecked(); - return Failure::Exception(); + return heap()->exception(); } -Failure* Isolate::ThrowIllegalOperation() { +Object* Isolate::ThrowIllegalOperation() { if (FLAG_stack_trace_on_illegal) PrintStack(stdout); return Throw(heap_.illegal_access_string()); } -Failure* Isolate::ThrowInvalidStringLength() { +Object* Isolate::ThrowInvalidStringLength() { return Throw(*factory()->NewRangeError( "invalid_string_length", HandleVector<Object>(NULL, 0))); } @@ -982,8 +903,8 @@ void Isolate::RestorePendingMessageFromTryCatch(v8::TryCatch* handler) { } -Failure* Isolate::PromoteScheduledException() { - MaybeObject* thrown = scheduled_exception(); +Object* Isolate::PromoteScheduledException() { + Object* thrown = scheduled_exception(); clear_scheduled_exception(); // Re-throw the exception to avoid getting repeated error reporting. return ReThrow(thrown); @@ -1070,15 +991,17 @@ bool Isolate::ShouldReportException(bool* can_be_caught_externally, bool Isolate::IsErrorObject(Handle<Object> obj) { if (!obj->IsJSObject()) return false; - String* error_key = - *(factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("$Error"))); - Object* error_constructor = - js_builtins_object()->GetPropertyNoExceptionThrown(error_key); + Handle<String> error_key = + factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("$Error")); + Handle<Object> error_constructor = Object::GetProperty( + js_builtins_object(), error_key).ToHandleChecked(); + DisallowHeapAllocation no_gc; for (Object* prototype = *obj; !prototype->IsNull(); prototype = prototype->GetPrototype(this)) { if (!prototype->IsJSObject()) return false; - if (JSObject::cast(prototype)->map()->constructor() == error_constructor) { + if (JSObject::cast(prototype)->map()->constructor() == + *error_constructor) { return true; } } @@ -1106,12 +1029,10 @@ void Isolate::DoThrow(Object* exception, MessageLocation* location) { thread_local_top()->rethrowing_message_ = false; -#ifdef ENABLE_DEBUGGER_SUPPORT // Notify debugger of exception. if (catchable_by_javascript) { debugger_->OnException(exception_handle, report_exception); } -#endif // Generate the message if required. 
if (report_exception || try_catch_needs_message) { @@ -1129,7 +1050,7 @@ void Isolate::DoThrow(Object* exception, MessageLocation* location) { if (capture_stack_trace_for_uncaught_exceptions_) { if (IsErrorObject(exception_handle)) { // We fetch the stack trace that corresponds to this error object. - String* key = heap()->hidden_stack_trace_string(); + Handle<String> key = factory()->hidden_stack_trace_string(); Object* stack_property = JSObject::cast(*exception_handle)->GetHiddenProperty(key); // Property lookup may have failed. In this case it's probably not @@ -1151,10 +1072,9 @@ void Isolate::DoThrow(Object* exception, MessageLocation* location) { // before throwing as uncaught exception. Note that the pending // exception object to be set later must not be turned into a string. if (exception_arg->IsJSObject() && !IsErrorObject(exception_arg)) { - bool failed = false; - exception_arg = - Execution::ToDetailString(this, exception_arg, &failed); - if (failed) { + MaybeHandle<Object> maybe_exception = + Execution::ToDetailString(this, exception_arg); + if (!maybe_exception.ToHandle(&exception_arg)) { exception_arg = factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("exception")); } @@ -1192,19 +1112,19 @@ void Isolate::DoThrow(Object* exception, MessageLocation* location) { // In this case we could have an extension (or an internal error // somewhere) and we print out the line number at which the error occured // to the console for easier debugging. - int line_number = GetScriptLineNumberSafe(location->script(), - location->start_pos()); + int line_number = + location->script()->GetLineNumber(location->start_pos()) + 1; if (exception->IsString() && location->script()->name()->IsString()) { OS::PrintError( "Extension or internal compilation error: %s in %s at line %d.\n", String::cast(exception)->ToCString().get(), String::cast(location->script()->name())->ToCString().get(), - line_number + 1); + line_number); } else if (location->script()->name()->IsString()) { OS::PrintError( "Extension or internal compilation error in %s at line %d.\n", String::cast(location->script()->name())->ToCString().get(), - line_number + 1); + line_number); } else { OS::PrintError("Extension or internal compilation error.\n"); } @@ -1271,7 +1191,7 @@ void Isolate::ReportPendingMessages() { HandleScope scope(this); if (thread_local_top_.pending_exception_ == - heap()->termination_exception()) { + heap()->termination_exception()) { // Do nothing: if needed, the exception has been already propagated to // v8::TryCatch. 
} else { @@ -1381,7 +1301,6 @@ Handle<Context> Isolate::global_context() { Handle<Context> Isolate::GetCallingNativeContext() { JavaScriptFrameIterator it(this); -#ifdef ENABLE_DEBUGGER_SUPPORT if (debug_->InDebugger()) { while (!it.done()) { JavaScriptFrame* frame = it.frame(); @@ -1393,7 +1312,6 @@ Handle<Context> Isolate::GetCallingNativeContext() { } } } -#endif // ENABLE_DEBUGGER_SUPPORT if (it.done()) return Handle<Context>::null(); JavaScriptFrame* frame = it.frame(); Context* context = Context::cast(frame->context()); @@ -1504,10 +1422,10 @@ Isolate::Isolate() compilation_cache_(NULL), counters_(NULL), code_range_(NULL), - debugger_initialized_(false), logger_(NULL), stats_table_(NULL), stub_cache_(NULL), + code_aging_helper_(NULL), deoptimizer_data_(NULL), materialized_object_store_(NULL), capture_stack_trace_for_uncaught_exceptions_(false), @@ -1565,11 +1483,6 @@ Isolate::Isolate() memset(&js_spill_information_, 0, sizeof(js_spill_information_)); #endif -#ifdef ENABLE_DEBUGGER_SUPPORT - debug_ = NULL; - debugger_ = NULL; -#endif - handle_scope_data_.Initialize(); #define ISOLATE_INIT_EXECUTE(type, name, initial_value) \ @@ -1581,6 +1494,10 @@ Isolate::Isolate() memset(name##_, 0, sizeof(type) * length); ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE) #undef ISOLATE_INIT_ARRAY_EXECUTE + + InitializeLoggingAndCounters(); + debug_ = new Debug(this); + debugger_ = new Debugger(this); } @@ -1624,9 +1541,7 @@ void Isolate::Deinit() { if (state_ == INITIALIZED) { TRACE_ISOLATE(deinit); -#ifdef ENABLE_DEBUGGER_SUPPORT debugger()->UnloadDebugger(); -#endif if (concurrent_recompilation_enabled()) { optimizing_compiler_thread_->Stop(); @@ -1713,10 +1628,7 @@ Isolate::~Isolate() { // Has to be called while counters_ are still alive runtime_zone_.DeleteKeptSegment(); - // The entry stack must be empty when we get here, - // except for the default isolate, where it can - // still contain up to one entry stack item - ASSERT(entry_stack_ == NULL || this == default_isolate_); + // The entry stack must be empty when we get here. ASSERT(entry_stack_ == NULL || entry_stack_->previous_item == NULL); delete entry_stack_; @@ -1749,6 +1661,8 @@ Isolate::~Isolate() { delete stub_cache_; stub_cache_ = NULL; + delete code_aging_helper_; + code_aging_helper_ = NULL; delete stats_table_; stats_table_ = NULL; @@ -1797,12 +1711,10 @@ Isolate::~Isolate() { delete random_number_generator_; random_number_generator_ = NULL; -#ifdef ENABLE_DEBUGGER_SUPPORT delete debugger_; debugger_ = NULL; delete debug_; debug_ = NULL; -#endif } @@ -1827,9 +1739,6 @@ void Isolate::PropagatePendingExceptionToExternalTryCatch() { try_catch_handler()->exception_ = heap()->null_value(); } else { v8::TryCatch* handler = try_catch_handler(); - // At this point all non-object (failure) exceptions have - // been dealt with so this shouldn't fail. 
- ASSERT(!pending_exception()->IsFailure()); ASSERT(thread_local_top_.pending_message_obj_->IsJSMessageObject() || thread_local_top_.pending_message_obj_->IsTheHole()); ASSERT(thread_local_top_.pending_message_script_->IsScript() || @@ -1858,18 +1767,6 @@ void Isolate::InitializeLoggingAndCounters() { } -void Isolate::InitializeDebugger() { -#ifdef ENABLE_DEBUGGER_SUPPORT - LockGuard<RecursiveMutex> lock_guard(debugger_access()); - if (NoBarrier_Load(&debugger_initialized_)) return; - InitializeLoggingAndCounters(); - debug_ = new Debug(this); - debugger_ = new Debugger(this); - Release_Store(&debugger_initialized_, true); -#endif -} - - bool Isolate::Init(Deserializer* des) { ASSERT(state_ != INITIALIZED); TRACE_ISOLATE(init); @@ -1879,8 +1776,8 @@ bool Isolate::Init(Deserializer* des) { has_fatal_error_ = false; use_crankshaft_ = FLAG_crankshaft - && !Serializer::enabled() - && CPU::SupportsCrankshaft(); + && !Serializer::enabled(this) + && CpuFeatures::SupportsCrankshaft(); if (function_entry_hook() != NULL) { // When function entry hooking is in effect, we have to create the code @@ -1893,10 +1790,6 @@ bool Isolate::Init(Deserializer* des) { // The initialization process does not handle memory exhaustion. DisallowAllocationFailure disallow_allocation_failure(this); - InitializeLoggingAndCounters(); - - InitializeDebugger(); - memory_allocator_ = new MemoryAllocator(this); code_range_ = new CodeRange(this); @@ -1944,6 +1837,8 @@ bool Isolate::Init(Deserializer* des) { #endif #endif + code_aging_helper_ = new CodeAgingHelper(); + { // NOLINT // Ensure that the thread has a valid stack guard. The v8::Locker object // will ensure this too, but we don't have to use lockers if we are only @@ -2011,10 +1906,6 @@ bool Isolate::Init(Deserializer* des) { } } -#ifdef ENABLE_DEBUGGER_SUPPORT - debug_->SetUp(create_heap_objects); -#endif - // If we are deserializing, read the state into the now-empty heap. if (!create_heap_objects) { des->Deserialize(this); @@ -2073,7 +1964,7 @@ bool Isolate::Init(Deserializer* des) { kDeoptTableSerializeEntryCount - 1); } - if (!Serializer::enabled()) { + if (!Serializer::enabled(this)) { // Ensure that all stubs which need to be generated ahead of time, but // cannot be serialized into the snapshot have been generated. 
HandleScope scope(this); @@ -2310,11 +2201,57 @@ Handle<JSObject> Isolate::GetSymbolRegistry() { Handle<String> name = factory()->InternalizeUtf8String(nested[i]); Handle<JSObject> obj = factory()->NewJSObjectFromMap(map); JSObject::NormalizeProperties(obj, KEEP_INOBJECT_PROPERTIES, 8); - JSObject::SetProperty(registry, name, obj, NONE, STRICT); + JSObject::SetProperty(registry, name, obj, NONE, STRICT).Assert(); } } return Handle<JSObject>::cast(factory()->symbol_registry()); } +void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) { + for (int i = 0; i < call_completed_callbacks_.length(); i++) { + if (callback == call_completed_callbacks_.at(i)) return; + } + call_completed_callbacks_.Add(callback); +} + + +void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) { + for (int i = 0; i < call_completed_callbacks_.length(); i++) { + if (callback == call_completed_callbacks_.at(i)) { + call_completed_callbacks_.Remove(i); + } + } +} + + +void Isolate::FireCallCompletedCallback() { + bool has_call_completed_callbacks = !call_completed_callbacks_.is_empty(); + bool run_microtasks = autorun_microtasks() && microtask_pending(); + if (!has_call_completed_callbacks && !run_microtasks) return; + + if (!handle_scope_implementer()->CallDepthIsZero()) return; + // Fire callbacks. Increase call depth to prevent recursive callbacks. + handle_scope_implementer()->IncrementCallDepth(); + if (run_microtasks) Execution::RunMicrotasks(this); + for (int i = 0; i < call_completed_callbacks_.length(); i++) { + call_completed_callbacks_.at(i)(); + } + handle_scope_implementer()->DecrementCallDepth(); +} + + +void Isolate::RunMicrotasks() { + if (!microtask_pending()) + return; + + ASSERT(handle_scope_implementer()->CallDepthIsZero()); + + // Increase call depth to prevent recursive callbacks. + handle_scope_implementer()->IncrementCallDepth(); + Execution::RunMicrotasks(this); + handle_scope_implementer()->DecrementCallDepth(); +} + + } } // namespace v8::internal diff --git a/deps/v8/src/isolate.h b/deps/v8/src/isolate.h index b4713786a..5b1e77f20 100644 --- a/deps/v8/src/isolate.h +++ b/deps/v8/src/isolate.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ISOLATE_H_ #define V8_ISOLATE_H_ @@ -77,6 +54,7 @@ class InlineRuntimeFunctionsTable; class InnerPointerToCodeCache; class MaterializedObjectStore; class NoAllocationStringAllocator; +class CodeAgingHelper; class RandomNumberGenerator; class RegExpStack; class SaveContext; @@ -95,11 +73,9 @@ template <StateTag Tag> class VMState; typedef void* ExternalReferenceRedirectorPointer(); -#ifdef ENABLE_DEBUGGER_SUPPORT class Debug; class Debugger; class DebuggerAgent; -#endif #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \ !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \ @@ -117,7 +93,7 @@ class Simulator; // of handles to the actual constants. typedef ZoneList<Handle<Object> > ZoneObjectList; -#define RETURN_IF_SCHEDULED_EXCEPTION(isolate) \ +#define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate) \ do { \ Isolate* __isolate__ = (isolate); \ if (__isolate__->has_scheduled_exception()) { \ @@ -125,31 +101,46 @@ typedef ZoneList<Handle<Object> > ZoneObjectList; } \ } while (false) -#define RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, T) \ - do { \ - Isolate* __isolate__ = (isolate); \ - if (__isolate__->has_scheduled_exception()) { \ - __isolate__->PromoteScheduledException(); \ - return Handle<T>::null(); \ - } \ +// Macros for MaybeHandle. 
+ +#define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, T) \ + do { \ + Isolate* __isolate__ = (isolate); \ + if (__isolate__->has_scheduled_exception()) { \ + __isolate__->PromoteScheduledException(); \ + return MaybeHandle<T>(); \ + } \ } while (false) -#define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value) \ - do { \ - if ((call).is_null()) { \ - ASSERT((isolate)->has_pending_exception()); \ - return (value); \ - } \ +#define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value) \ + do { \ + if (!(call).ToHandle(&dst)) { \ + ASSERT((isolate)->has_pending_exception()); \ + return value; \ + } \ } while (false) -#define CHECK_NOT_EMPTY_HANDLE(isolate, call) \ - do { \ - ASSERT(!(isolate)->has_pending_exception()); \ - CHECK(!(call).is_null()); \ +#define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call) \ + ASSIGN_RETURN_ON_EXCEPTION_VALUE( \ + isolate, dst, call, isolate->heap()->exception()) + +#define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T) \ + ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>()) + +#define RETURN_ON_EXCEPTION_VALUE(isolate, call, value) \ + do { \ + if ((call).is_null()) { \ + ASSERT((isolate)->has_pending_exception()); \ + return value; \ + } \ } while (false) -#define RETURN_IF_EMPTY_HANDLE(isolate, call) \ - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, Failure::Exception()) +#define RETURN_FAILURE_ON_EXCEPTION(isolate, call) \ + RETURN_ON_EXCEPTION_VALUE(isolate, call, isolate->heap()->exception()) + +#define RETURN_ON_EXCEPTION(isolate, call, T) \ + RETURN_ON_EXCEPTION_VALUE(isolate, call, MaybeHandle<T>()) + #define FOR_EACH_ISOLATE_ADDRESS_NAME(C) \ C(Handler, handler) \ @@ -251,7 +242,7 @@ class ThreadLocalTop BASE_EMBEDDED { // lookups. Context* context_; ThreadId thread_id_; - MaybeObject* pending_exception_; + Object* pending_exception_; bool has_pending_message_; bool rethrowing_message_; Object* pending_message_obj_; @@ -261,7 +252,7 @@ class ThreadLocalTop BASE_EMBEDDED { // Use a separate value for scheduled exceptions to preserve the // invariants that hold about pending_exception. We may want to // unify them later. - MaybeObject* scheduled_exception_; + Object* scheduled_exception_; bool external_caught_exception_; SaveContext* save_context_; v8::TryCatch* catcher_; @@ -295,17 +286,6 @@ class ThreadLocalTop BASE_EMBEDDED { }; -#ifdef ENABLE_DEBUGGER_SUPPORT - -#define ISOLATE_DEBUGGER_INIT_LIST(V) \ - V(DebuggerAgent*, debugger_agent_instance, NULL) -#else - -#define ISOLATE_DEBUGGER_INIT_LIST(V) - -#endif - - #if V8_TARGET_ARCH_ARM && !defined(__arm__) || \ V8_TARGET_ARCH_ARM64 && !defined(__aarch64__) || \ V8_TARGET_ARCH_MIPS && !defined(__mips__) @@ -378,8 +358,8 @@ typedef List<HeapObject*> DebugObjectCache; V(bool, fp_stubs_generated, false) \ V(int, max_available_threads, 0) \ V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu) \ - ISOLATE_INIT_SIMULATOR_LIST(V) \ - ISOLATE_DEBUGGER_INIT_LIST(V) + V(DebuggerAgent*, debugger_agent_instance, NULL) \ + ISOLATE_INIT_SIMULATOR_LIST(V) #define THREAD_LOCAL_TOP_ACCESSOR(type, name) \ inline void set_##name(type v) { thread_local_top_.name##_ = v; } \ @@ -515,16 +495,6 @@ class Isolate { // If one does not yet exist, return null. PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id); -#ifdef ENABLE_DEBUGGER_SUPPORT - // Get the debugger from the default isolate. Preinitializes the - // default isolate if needed. - static Debugger* GetDefaultIsolateDebugger(); -#endif - - // Get the stack guard from the default isolate. 
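The macros introduced above (ASSIGN_RETURN_ON_EXCEPTION_VALUE, ASSIGN_RETURN_ON_EXCEPTION, RETURN_ON_EXCEPTION, and friends) propagate failure through MaybeHandle return values instead of the old empty-Handle / Failure* convention that RETURN_IF_EMPTY_HANDLE expressed. The sketch below reduces the idea to a tiny Maybe<T> plus one macro; the type, macro, and function names are invented for teaching and are not the real MaybeHandle machinery.

#include <cstdio>
#include <optional>
#include <string>

template <typename T>
class Maybe {
 public:
  Maybe() = default;                       // empty == "exception pending"
  explicit Maybe(T value) : value_(std::move(value)) {}
  bool ToValue(T* out) const {
    if (!value_) return false;
    *out = *value_;
    return true;
  }
 private:
  std::optional<T> value_;
};

// Counterpart of ASSIGN_RETURN_ON_EXCEPTION: assign on success, otherwise bail
// out of the current function with an empty Maybe so the failure propagates.
#define ASSIGN_OR_RETURN_EMPTY(dst, call, T)        \
  do {                                              \
    if (!(call).ToValue(&dst)) return Maybe<T>();   \
  } while (false)

Maybe<int> ParseDigit(char c) {
  if (c < '0' || c > '9') return Maybe<int>();  // the "throw"
  return Maybe<int>(c - '0');
}

Maybe<int> ParseTwoDigits(const std::string& s) {
  int hi = 0, lo = 0;
  ASSIGN_OR_RETURN_EMPTY(hi, ParseDigit(s[0]), int);
  ASSIGN_OR_RETURN_EMPTY(lo, ParseDigit(s[1]), int);
  return Maybe<int>(hi * 10 + lo);
}

int main() {
  int v = 0;
  if (ParseTwoDigits("42").ToValue(&v)) std::printf("parsed %d\n", v);
  if (!ParseTwoDigits("4x").ToValue(&v)) std::printf("propagated failure\n");
  return 0;
}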
Preinitializes the - // default isolate if needed. - static StackGuard* GetDefaultIsolateStackGuard(); - // Returns the key used to store the pointer to the current isolate. // Used internally for V8 threads that do not execute JavaScript but still // are part of the domain of an isolate (like the context switcher). @@ -539,12 +509,6 @@ class Isolate { static Thread::LocalStorageKey per_isolate_thread_data_key(); - // If a client attempts to create a Locker without specifying an isolate, - // we assume that the client is using legacy behavior. Set up the current - // thread to be inside the implicit isolate (or fail a check if we have - // switched to non-legacy behavior). - static void EnterDefaultIsolate(); - // Mutex for serializing access to break control structures. RecursiveMutex* break_access() { return &break_access_; } @@ -567,24 +531,28 @@ class Isolate { THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id) // Interface to pending exception. - MaybeObject* pending_exception() { + Object* pending_exception() { ASSERT(has_pending_exception()); + ASSERT(!thread_local_top_.pending_exception_->IsException()); return thread_local_top_.pending_exception_; } - void set_pending_exception(MaybeObject* exception) { - thread_local_top_.pending_exception_ = exception; + void set_pending_exception(Object* exception_obj) { + ASSERT(!exception_obj->IsException()); + thread_local_top_.pending_exception_ = exception_obj; } void clear_pending_exception() { + ASSERT(!thread_local_top_.pending_exception_->IsException()); thread_local_top_.pending_exception_ = heap_.the_hole_value(); } - MaybeObject** pending_exception_address() { + Object** pending_exception_address() { return &thread_local_top_.pending_exception_; } bool has_pending_exception() { + ASSERT(!thread_local_top_.pending_exception_->IsException()); return !thread_local_top_.pending_exception_->IsTheHole(); } @@ -607,7 +575,7 @@ class Isolate { THREAD_LOCAL_TOP_ACCESSOR(v8::TryCatch*, catcher) - MaybeObject** scheduled_exception_address() { + Object** scheduled_exception_address() { return &thread_local_top_.scheduled_exception_; } @@ -624,20 +592,23 @@ class Isolate { &thread_local_top_.pending_message_script_); } - MaybeObject* scheduled_exception() { + Object* scheduled_exception() { ASSERT(has_scheduled_exception()); + ASSERT(!thread_local_top_.scheduled_exception_->IsException()); return thread_local_top_.scheduled_exception_; } bool has_scheduled_exception() { + ASSERT(!thread_local_top_.scheduled_exception_->IsException()); return thread_local_top_.scheduled_exception_ != heap_.the_hole_value(); } void clear_scheduled_exception() { + ASSERT(!thread_local_top_.scheduled_exception_->IsException()); thread_local_top_.scheduled_exception_ = heap_.the_hole_value(); } bool IsExternallyCaught(); - bool is_catchable_by_javascript(MaybeObject* exception) { + bool is_catchable_by_javascript(Object* exception) { return exception != heap()->termination_exception(); } @@ -693,11 +664,10 @@ class Isolate { class ExceptionScope { public: explicit ExceptionScope(Isolate* isolate) : - // Scope currently can only be used for regular exceptions, not - // failures like OOM or termination exception. + // Scope currently can only be used for regular exceptions, + // not termination exception. 
isolate_(isolate), - pending_exception_(isolate_->pending_exception()->ToObjectUnchecked(), - isolate_), + pending_exception_(isolate_->pending_exception(), isolate_), catcher_(isolate_->catcher()) { } @@ -738,39 +708,31 @@ class Isolate { // the result is false, the pending exception is guaranteed to be // set. - // TODO(yangguo): temporary wrappers - bool MayNamedAccessWrapper(Handle<JSObject> receiver, - Handle<Object> key, - v8::AccessType type) { - return MayNamedAccess(*receiver, *key, type); - } - bool MayIndexedAccessWrapper(Handle<JSObject> receiver, - uint32_t index, - v8::AccessType type) { - return MayIndexedAccess(*receiver, index, type); - } - void ReportFailedAccessCheckWrapper(Handle<JSObject> receiver, - v8::AccessType type) { - ReportFailedAccessCheck(*receiver, type); - } - - bool MayNamedAccess(JSObject* receiver, - Object* key, + bool MayNamedAccess(Handle<JSObject> receiver, + Handle<Object> key, v8::AccessType type); - bool MayIndexedAccess(JSObject* receiver, + bool MayIndexedAccess(Handle<JSObject> receiver, uint32_t index, v8::AccessType type); void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback); - void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type); + void ReportFailedAccessCheck(Handle<JSObject> receiver, v8::AccessType type); // Exception throwing support. The caller should use the result // of Throw() as its return value. - Failure* Throw(Object* exception, MessageLocation* location = NULL); + Object* Throw(Object* exception, MessageLocation* location = NULL); + + template <typename T> + MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception, + MessageLocation* location = NULL) { + Throw(*exception, location); + return MaybeHandle<T>(); + } + // Re-throw an exception. This involves no error reporting since // error reporting was handled when the exception was thrown // originally. - Failure* ReThrow(MaybeObject* exception); + Object* ReThrow(Object* exception); void ScheduleThrow(Object* exception); // Re-set pending message, script and positions reported to the TryCatch // back to the TLS for re-use when rethrowing. @@ -778,11 +740,11 @@ class Isolate { void ReportPendingMessages(); // Return pending location if any or unfilled structure. MessageLocation GetMessageLocation(); - Failure* ThrowIllegalOperation(); - Failure* ThrowInvalidStringLength(); + Object* ThrowIllegalOperation(); + Object* ThrowInvalidStringLength(); // Promote a scheduled exception to pending. Asserts has_scheduled_exception. - Failure* PromoteScheduledException(); + Object* PromoteScheduledException(); void DoThrow(Object* exception, MessageLocation* location); // Checks if exception should be reported and finds out if it's // caught externally. @@ -794,8 +756,8 @@ class Isolate { void ComputeLocation(MessageLocation* target); // Out of resource exception helpers. 
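Several declarations above (MayNamedAccess, MayIndexedAccess, ReportFailedAccessCheck, and the ExceptionScope constructor) move from raw JSObject*/Object* parameters to Handle<> parameters. The point of a handle is the extra level of indirection: the collector can move an object and rewrite the slot, and the handle still resolves correctly, while a raw pointer taken before the move goes stale. The toy model below illustrates only that indirection; it is not V8's Handle or its garbage collector.

#include <cstdio>

struct Obj { int value; };

// A "handle" points at a slot; the collector rewrites the slot when it moves
// the object, so dereferencing the handle always finds the live copy.
template <typename T>
class Handle {
 public:
  explicit Handle(T** slot) : slot_(slot) {}
  T* operator->() const { return *slot_; }
 private:
  T** slot_;
};

int main() {
  Obj a{1}, b{1};
  Obj* slot = &a;          // the managed slot
  Handle<Obj> h(&slot);

  Obj* raw = &a;           // raw pointer taken before a "move"
  slot = &b;               // simulate the collector relocating the object
  a.value = -1;            // the old copy is now garbage

  std::printf("handle sees %d, raw pointer sees %d\n", h->value, raw->value);
  return 0;
}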
- Failure* StackOverflow(); - Failure* TerminateExecution(); + Object* StackOverflow(); + Object* TerminateExecution(); void CancelTerminateExecution(); // Administration @@ -875,6 +837,7 @@ class Isolate { Heap* heap() { return &heap_; } StatsTable* stats_table(); StubCache* stub_cache() { return stub_cache_; } + CodeAgingHelper* code_aging_helper() { return code_aging_helper_; } DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; } ThreadLocalTop* thread_local_top() { return &thread_local_top_; } MaterializedObjectStore* materialized_object_store() { @@ -967,16 +930,8 @@ class Isolate { inline bool IsCodePreAgingActive(); -#ifdef ENABLE_DEBUGGER_SUPPORT - Debugger* debugger() { - if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger(); - return debugger_; - } - Debug* debug() { - if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger(); - return debug_; - } -#endif + Debugger* debugger() { return debugger_; } + Debug* debug() { return debug_; } inline bool IsDebuggerActive(); inline bool DebuggerHasBreakPoints(); @@ -1085,11 +1040,6 @@ class Isolate { return sweeper_thread_; } - // PreInits and returns a default isolate. Needed when a new thread tries - // to create a Locker for the first time (the lock itself is in the isolate). - // TODO(svenpanne) This method is on death row... - static v8::Isolate* GetDefaultIsolateForLocking(); - int id() const { return static_cast<int>(id_); } HStatistics* GetHStatistics(); @@ -1119,6 +1069,12 @@ class Isolate { // Get (and lazily initialize) the registry for per-isolate symbols. Handle<JSObject> GetSymbolRegistry(); + void AddCallCompletedCallback(CallCompletedCallback callback); + void RemoveCallCompletedCallback(CallCompletedCallback callback); + void FireCallCompletedCallback(); + + void RunMicrotasks(); + private: Isolate(); @@ -1222,8 +1178,6 @@ class Isolate { void PropagatePendingExceptionToExternalTryCatch(); - void InitializeDebugger(); - // Traverse prototype chain to find out whether the object is derived from // the Error object. bool IsErrorObject(Handle<Object> obj); @@ -1245,6 +1199,7 @@ class Isolate { StackGuard stack_guard_; StatsTable* stats_table_; StubCache* stub_cache_; + CodeAgingHelper* code_aging_helper_; DeoptimizerData* deoptimizer_data_; MaterializedObjectStore* materialized_object_store_; ThreadLocalTop thread_local_top_; @@ -1300,10 +1255,8 @@ class Isolate { JSObject::SpillInformation js_spill_information_; #endif -#ifdef ENABLE_DEBUGGER_SUPPORT Debugger* debugger_; Debug* debug_; -#endif CpuProfiler* cpu_profiler_; HeapProfiler* heap_profiler_; FunctionEntryHook function_entry_hook_; @@ -1339,6 +1292,9 @@ class Isolate { int next_optimization_id_; + // List of callbacks when a Call completes. + List<CallCompletedCallback> call_completed_callbacks_; + friend class ExecutionAccess; friend class HandleScopeImplementer; friend class IsolateInitializer; diff --git a/deps/v8/src/json-parser.h b/deps/v8/src/json-parser.h index 4c2b47918..f3017784b 100644 --- a/deps/v8/src/json-parser.h +++ b/deps/v8/src/json-parser.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_JSON_PARSER_H_ #define V8_JSON_PARSER_H_ @@ -31,7 +8,7 @@ #include "v8.h" #include "char-predicates-inl.h" -#include "v8conversions.h" +#include "conversions.h" #include "messages.h" #include "spaces-inl.h" #include "token.h" @@ -43,7 +20,7 @@ namespace internal { template <bool seq_ascii> class JsonParser BASE_EMBEDDED { public: - static Handle<Object> Parse(Handle<String> source) { + MUST_USE_RESULT static MaybeHandle<Object> Parse(Handle<String> source) { return JsonParser(source).ParseJson(); } @@ -59,7 +36,7 @@ class JsonParser BASE_EMBEDDED { object_constructor_(isolate_->native_context()->object_function(), isolate_), position_(-1) { - FlattenString(source_); + source_ = String::Flatten(source_); pretenure_ = (source_length_ >= kPretenureTreshold) ? TENURED : NOT_TENURED; // Optimized fast case where we only have ASCII characters. @@ -69,7 +46,7 @@ class JsonParser BASE_EMBEDDED { } // Parse a string containing a single JSON value. 
- Handle<Object> ParseJson(); + MaybeHandle<Object> ParseJson(); inline void Advance() { position_++; @@ -219,7 +196,7 @@ class JsonParser BASE_EMBEDDED { }; template <bool seq_ascii> -Handle<Object> JsonParser<seq_ascii>::ParseJson() { +MaybeHandle<Object> JsonParser<seq_ascii>::ParseJson() { // Advance to the first character (possibly EOS) AdvanceSkipWhitespace(); Handle<Object> result = ParseJsonValue(); @@ -257,8 +234,7 @@ Handle<Object> JsonParser<seq_ascii>::ParseJson() { break; default: message = "unexpected_token"; - Handle<Object> name = - LookupSingleCharacterStringFromCode(isolate_, c0_); + Handle<Object> name = factory->LookupSingleCharacterStringFromCode(c0_); Handle<FixedArray> element = factory->NewFixedArray(1); element->set(0, *name); array = factory->NewJSArrayWithElements(element); @@ -268,9 +244,8 @@ Handle<Object> JsonParser<seq_ascii>::ParseJson() { MessageLocation location(factory->NewScript(source_), position_, position_ + 1); - Handle<Object> result = factory->NewSyntaxError(message, array); - isolate()->Throw(*result, &location); - return Handle<Object>::null(); + Handle<Object> error = factory->NewSyntaxError(message, array); + return isolate()->template Throw<Object>(error, &location); } return result; } @@ -361,7 +336,7 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonObject() { Handle<Object> value = ParseJsonValue(); if (value.is_null()) return ReportUnexpectedCharacter(); - JSObject::SetOwnElement(json_object, index, value, SLOPPY); + JSObject::SetOwnElement(json_object, index, value, SLOPPY).Assert(); continue; } // Not an index, fallback to the slow path. @@ -417,7 +392,15 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonObject() { if (value->IsSmi() && expected_representation.IsDouble()) { value = factory()->NewHeapNumber( Handle<Smi>::cast(value)->value()); + } else if (expected_representation.IsHeapObject() && + !target->instance_descriptors()->GetFieldType( + descriptor)->NowContains(value)) { + Handle<HeapType> value_type(value->OptimalType( + isolate(), expected_representation)); + Map::GeneralizeFieldType(target, descriptor, value_type); } + ASSERT(target->instance_descriptors()->GetFieldType( + descriptor)->NowContains(value)); properties.Add(value, zone()); map = target; continue; @@ -443,7 +426,7 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonObject() { } JSObject::SetLocalPropertyIgnoreAttributes( - json_object, key, value, NONE); + json_object, key, value, NONE).Assert(); } while (MatchSkipWhiteSpace(',')); if (c0_ != '}') { return ReportUnexpectedCharacter(); @@ -541,17 +524,16 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonNumber() { if (seq_ascii) { Vector<const uint8_t> chars(seq_source_->GetChars() + beg_pos, length); number = StringToDouble(isolate()->unicode_cache(), - Vector<const char>::cast(chars), - NO_FLAGS, // Hex, octal or trailing junk. - OS::nan_value()); + chars, + NO_FLAGS, // Hex, octal or trailing junk. + OS::nan_value()); } else { Vector<uint8_t> buffer = Vector<uint8_t>::New(length); String::WriteToFlat(*source_, buffer.start(), beg_pos, position_); Vector<const uint8_t> result = Vector<const uint8_t>(buffer.start(), length); number = StringToDouble(isolate()->unicode_cache(), - // TODO(dcarney): Convert StringToDouble to uint_t. - Vector<const char>::cast(result), + result, NO_FLAGS, // Hex, octal or trailing junk. 
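JsonParser::ParseJson above now signals syntax errors by building a SyntaxError from a message key plus the offending position and returning through Throw<Object>, instead of handing back a null handle. The standalone sketch below mirrors that reporting shape, an error record with message and position next to an empty result, using invented names and std::optional in place of V8's handles.

#include <cstdio>
#include <optional>
#include <string>

struct ParseError {
  std::string message;
  int position;
};

std::optional<int> ParseUnsignedInt(const std::string& src, ParseError* err) {
  int value = 0;
  for (size_t i = 0; i < src.size(); ++i) {
    char c = src[i];
    if (c < '0' || c > '9') {
      *err = {"unexpected_token", static_cast<int>(i)};  // record what and where
      return std::nullopt;                               // empty result
    }
    value = value * 10 + (c - '0');
  }
  if (src.empty()) {
    *err = {"unexpected_eos", 0};
    return std::nullopt;
  }
  return value;
}

int main() {
  ParseError err{};
  if (auto v = ParseUnsignedInt("123", &err)) std::printf("ok: %d\n", *v);
  if (!ParseUnsignedInt("12x", &err))
    std::printf("%s at position %d\n", err.message.c_str(), err.position);
  return 0;
}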
0.0); buffer.Dispose(); @@ -583,14 +565,14 @@ template <> inline Handle<SeqTwoByteString> NewRawString(Factory* factory, int length, PretenureFlag pretenure) { - return factory->NewRawTwoByteString(length, pretenure); + return factory->NewRawTwoByteString(length, pretenure).ToHandleChecked(); } template <> inline Handle<SeqOneByteString> NewRawString(Factory* factory, int length, PretenureFlag pretenure) { - return factory->NewRawOneByteString(length, pretenure); + return factory->NewRawOneByteString(length, pretenure).ToHandleChecked(); } @@ -606,7 +588,6 @@ Handle<String> JsonParser<seq_ascii>::SlowScanJsonString( int length = Min(max_length, Max(kInitialSpecialStringLength, 2 * count)); Handle<StringType> seq_string = NewRawString<StringType>(factory(), length, pretenure_); - ASSERT(!seq_string.is_null()); // Copy prefix into seq_str. SinkChar* dest = seq_string->GetChars(); String::WriteToFlat(*prefix, dest, start, end); @@ -793,8 +774,8 @@ Handle<String> JsonParser<seq_ascii>::ScanJsonString() { } } while (c0_ != '"'); int length = position_ - beg_pos; - Handle<String> result = factory()->NewRawOneByteString(length, pretenure_); - ASSERT(!result.is_null()); + Handle<String> result = + factory()->NewRawOneByteString(length, pretenure_).ToHandleChecked(); uint8_t* dest = SeqOneByteString::cast(*result)->GetChars(); String::WriteToFlat(*source_, dest, beg_pos, position_); diff --git a/deps/v8/src/json-stringifier.h b/deps/v8/src/json-stringifier.h index 3926969f6..7eb6746df 100644 --- a/deps/v8/src/json-stringifier.h +++ b/deps/v8/src/json-stringifier.h @@ -1,36 +1,13 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_JSON_STRINGIFIER_H_ #define V8_JSON_STRINGIFIER_H_ #include "v8.h" -#include "v8utils.h" -#include "v8conversions.h" +#include "conversions.h" +#include "utils.h" namespace v8 { namespace internal { @@ -39,17 +16,18 @@ class BasicJsonStringifier BASE_EMBEDDED { public: explicit BasicJsonStringifier(Isolate* isolate); - MaybeObject* Stringify(Handle<Object> object); + MUST_USE_RESULT MaybeHandle<Object> Stringify(Handle<Object> object); - INLINE(static MaybeObject* StringifyString(Isolate* isolate, - Handle<String> object)); + MUST_USE_RESULT INLINE(static MaybeHandle<Object> StringifyString( + Isolate* isolate, + Handle<String> object)); private: static const int kInitialPartLength = 32; static const int kMaxPartLength = 16 * 1024; static const int kPartLengthGrowthFactor = 2; - enum Result { UNCHANGED, SUCCESS, EXCEPTION, CIRCULAR, STACK_OVERFLOW }; + enum Result { UNCHANGED, SUCCESS, EXCEPTION }; void Accumulate(); @@ -81,8 +59,9 @@ class BasicJsonStringifier BASE_EMBEDDED { } } - Handle<Object> ApplyToJsonFunction(Handle<Object> object, - Handle<Object> key); + MUST_USE_RESULT MaybeHandle<Object> ApplyToJsonFunction( + Handle<Object> object, + Handle<Object> key); Result SerializeGeneric(Handle<Object> object, Handle<Object> key, @@ -90,9 +69,9 @@ class BasicJsonStringifier BASE_EMBEDDED { bool deferred_key); template <typename ResultType, typename Char> - INLINE(static MaybeObject* StringifyString_(Isolate* isolate, - Vector<Char> vector, - Handle<String> result)); + INLINE(static Handle<String> StringifyString_(Isolate* isolate, + Vector<Char> vector, + Handle<String> result)); // Entry point to serialize the object. INLINE(Result SerializeObject(Handle<Object> obj)) { @@ -140,7 +119,7 @@ class BasicJsonStringifier BASE_EMBEDDED { INLINE(Result SerializeJSArray(Handle<JSArray> object)); INLINE(Result SerializeJSObject(Handle<JSObject> object)); - Result SerializeJSArraySlow(Handle<JSArray> object, int length); + Result SerializeJSArraySlow(Handle<JSArray> object, uint32_t length); void SerializeString(Handle<String> object); @@ -263,38 +242,33 @@ BasicJsonStringifier::BasicJsonStringifier(Isolate* isolate) overflowed_(false) { factory_ = isolate_->factory(); accumulator_store_ = Handle<JSValue>::cast( - factory_->ToObject(factory_->empty_string())); + Object::ToObject(isolate, factory_->empty_string()).ToHandleChecked()); part_length_ = kInitialPartLength; - current_part_ = factory_->NewRawOneByteString(part_length_); - ASSERT(!current_part_.is_null()); + current_part_ = factory_->NewRawOneByteString(part_length_).ToHandleChecked(); tojson_string_ = factory_->toJSON_string(); stack_ = factory_->NewJSArray(8); } -MaybeObject* BasicJsonStringifier::Stringify(Handle<Object> object) { - switch (SerializeObject(object)) { - case UNCHANGED: - return isolate_->heap()->undefined_value(); - case SUCCESS: { - ShrinkCurrentPart(); - Accumulate(); - if (overflowed_) return isolate_->ThrowInvalidStringLength(); - return *accumulator(); +MaybeHandle<Object> BasicJsonStringifier::Stringify(Handle<Object> object) { + Result result = SerializeObject(object); + if (result == UNCHANGED) return isolate_->factory()->undefined_value(); + if (result == SUCCESS) { + ShrinkCurrentPart(); + Accumulate(); + if (overflowed_) { + return isolate_->Throw<Object>( + isolate_->factory()->NewInvalidStringLengthError()); } - case CIRCULAR: - return isolate_->Throw(*factory_->NewTypeError( - "circular_structure", HandleVector<Object>(NULL, 0))); - case STACK_OVERFLOW: - return isolate_->StackOverflow(); - 
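The stringifier's internal Result enum above shrinks to three states: UNCHANGED (the value is not JSON-serializable, so the caller yields undefined), SUCCESS (a string was accumulated), and EXCEPTION (an error is already pending); the former CIRCULAR and STACK_OVERFLOW states are now thrown at the point of detection. A hedged standalone sketch of that tri-state shape follows, with made-up value kinds standing in for real objects.

#include <cstdio>
#include <optional>
#include <string>

enum class Result { UNCHANGED, SUCCESS, EXCEPTION };

// Stand-in serializer: functions and undefined are "unchanged", a poisoned
// value simulates an exception recorded elsewhere.
Result SerializeValue(const std::string& kind, std::string* out) {
  if (kind == "function" || kind == "undefined") return Result::UNCHANGED;
  if (kind == "poisoned") return Result::EXCEPTION;
  *out = "\"" + kind + "\"";
  return Result::SUCCESS;
}

// Map the tri-state onto what the caller of Stringify-like code sees.
std::optional<std::string> Stringify(const std::string& kind) {
  std::string out;
  switch (SerializeValue(kind, &out)) {
    case Result::UNCHANGED: return std::string("undefined");
    case Result::SUCCESS:   return out;
    case Result::EXCEPTION: return std::nullopt;  // exception stays pending
  }
  return std::nullopt;
}

int main() {
  std::printf("%s\n", Stringify("number").value_or("<exception>").c_str());
  std::printf("%s\n", Stringify("function").value_or("<exception>").c_str());
  std::printf("%s\n", Stringify("poisoned").value_or("<exception>").c_str());
  return 0;
}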
default: - return Failure::Exception(); + return accumulator(); } + ASSERT(result == EXCEPTION); + return MaybeHandle<Object>(); } -MaybeObject* BasicJsonStringifier::StringifyString(Isolate* isolate, - Handle<String> object) { +MaybeHandle<Object> BasicJsonStringifier::StringifyString( + Isolate* isolate, Handle<String> object) { static const int kJsonQuoteWorstCaseBlowup = 6; static const int kSpaceForQuotes = 2; int worst_case_length = @@ -305,21 +279,19 @@ MaybeObject* BasicJsonStringifier::StringifyString(Isolate* isolate, return stringifier.Stringify(object); } - FlattenString(object); + object = String::Flatten(object); ASSERT(object->IsFlat()); if (object->IsOneByteRepresentationUnderneath()) { - Handle<String> result = - isolate->factory()->NewRawOneByteString(worst_case_length); - ASSERT(!result.is_null()); + Handle<String> result = isolate->factory()->NewRawOneByteString( + worst_case_length).ToHandleChecked(); DisallowHeapAllocation no_gc; return StringifyString_<SeqOneByteString>( isolate, object->GetFlatContent().ToOneByteVector(), result); } else { - Handle<String> result = - isolate->factory()->NewRawTwoByteString(worst_case_length); - ASSERT(!result.is_null()); + Handle<String> result = isolate->factory()->NewRawTwoByteString( + worst_case_length).ToHandleChecked(); DisallowHeapAllocation no_gc; return StringifyString_<SeqTwoByteString>( isolate, @@ -330,9 +302,9 @@ MaybeObject* BasicJsonStringifier::StringifyString(Isolate* isolate, template <typename ResultType, typename Char> -MaybeObject* BasicJsonStringifier::StringifyString_(Isolate* isolate, - Vector<Char> vector, - Handle<String> result) { +Handle<String> BasicJsonStringifier::StringifyString_(Isolate* isolate, + Vector<Char> vector, + Handle<String> result) { DisallowHeapAllocation no_gc; int final_size = 0; ResultType* dest = ResultType::cast(*result); @@ -341,7 +313,7 @@ MaybeObject* BasicJsonStringifier::StringifyString_(Isolate* isolate, dest->GetChars() + 1, vector.length()); dest->Set(final_size++, '\"'); - return *SeqString::Truncate(Handle<SeqString>::cast(result), final_size); + return SeqString::Truncate(Handle<SeqString>::cast(result), final_size); } @@ -364,25 +336,27 @@ void BasicJsonStringifier::Append_(const Char* chars) { } -Handle<Object> BasicJsonStringifier::ApplyToJsonFunction( +MaybeHandle<Object> BasicJsonStringifier::ApplyToJsonFunction( Handle<Object> object, Handle<Object> key) { LookupResult lookup(isolate_); - JSObject::cast(*object)->LookupRealNamedProperty(*tojson_string_, &lookup); + JSObject::cast(*object)->LookupRealNamedProperty(tojson_string_, &lookup); if (!lookup.IsProperty()) return object; PropertyAttributes attr; - Handle<Object> fun = - Object::GetProperty(object, object, &lookup, tojson_string_, &attr); - if (fun.is_null()) return Handle<Object>::null(); + Handle<Object> fun; + ASSIGN_RETURN_ON_EXCEPTION( + isolate_, fun, + Object::GetProperty(object, object, &lookup, tojson_string_, &attr), + Object); if (!fun->IsJSFunction()) return object; // Call toJSON function. if (key->IsSmi()) key = factory_->NumberToString(key); Handle<Object> argv[] = { key }; - bool has_exception = false; HandleScope scope(isolate_); - object = Execution::Call(isolate_, fun, object, 1, argv, &has_exception); - // Return empty handle to signal an exception. 
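StackPush above now throws directly when it finds the object already on the serialization stack (the "circular_structure" TypeError) or when the stack limit check overflows, reporting plain EXCEPTION to its caller. The cycle check itself is a linear scan of the objects currently being serialized; here is a standalone toy version of that scan, with invented types rather than V8's JSArray-backed stack.

#include <cstdio>
#include <string>
#include <vector>

struct Node {
  std::string name;
  std::vector<Node*> children;
};

// Serialize a node tree; fail if a node is already on the open stack (a cycle).
bool Serialize(Node* node, std::vector<Node*>* stack, std::string* out) {
  for (Node* open : *stack) {
    if (open == node) return false;  // circular_structure
  }
  stack->push_back(node);
  *out += "(" + node->name;
  for (Node* child : node->children) {
    if (!Serialize(child, stack, out)) return false;
  }
  *out += ")";
  stack->pop_back();
  return true;
}

int main() {
  Node a{"a"}, b{"b"};
  a.children.push_back(&b);
  std::string out;
  std::vector<Node*> stack;
  std::printf("acyclic ok: %d -> %s\n", Serialize(&a, &stack, &out), out.c_str());

  b.children.push_back(&a);  // introduce a cycle
  out.clear();
  stack.clear();
  std::printf("cyclic ok: %d\n", Serialize(&a, &stack, &out));
  return 0;
}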
- if (has_exception) return Handle<Object>::null(); + ASSIGN_RETURN_ON_EXCEPTION( + isolate_, object, + Execution::Call(isolate_, fun, object, 1, argv), + Object); return scope.CloseAndEscape(object); } @@ -390,7 +364,10 @@ Handle<Object> BasicJsonStringifier::ApplyToJsonFunction( BasicJsonStringifier::Result BasicJsonStringifier::StackPush( Handle<Object> object) { StackLimitCheck check(isolate_); - if (check.HasOverflowed()) return STACK_OVERFLOW; + if (check.HasOverflowed()) { + isolate_->StackOverflow(); + return EXCEPTION; + } int length = Smi::cast(stack_->length())->value(); { @@ -398,7 +375,10 @@ BasicJsonStringifier::Result BasicJsonStringifier::StackPush( FixedArray* elements = FixedArray::cast(stack_->elements()); for (int i = 0; i < length; i++) { if (elements->get(i) == *object) { - return CIRCULAR; + AllowHeapAllocation allow_to_return_error; + isolate_->Throw(*factory_->NewTypeError( + "circular_structure", HandleVector<Object>(NULL, 0))); + return EXCEPTION; } } } @@ -419,8 +399,10 @@ template <bool deferred_string_key> BasicJsonStringifier::Result BasicJsonStringifier::Serialize_( Handle<Object> object, bool comma, Handle<Object> key) { if (object->IsJSObject()) { - object = ApplyToJsonFunction(object, key); - if (object.is_null()) return EXCEPTION; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate_, object, + ApplyToJsonFunction(object, key), + EXCEPTION); } if (object->IsSmi()) { @@ -479,15 +461,16 @@ BasicJsonStringifier::Result BasicJsonStringifier::SerializeGeneric( Handle<Object> key, bool deferred_comma, bool deferred_key) { - Handle<JSObject> builtins(isolate_->native_context()->builtins()); - Handle<JSFunction> builtin = - Handle<JSFunction>::cast(GetProperty(builtins, "JSONSerializeAdapter")); + Handle<JSObject> builtins(isolate_->native_context()->builtins(), isolate_); + Handle<JSFunction> builtin = Handle<JSFunction>::cast(Object::GetProperty( + isolate_, builtins, "JSONSerializeAdapter").ToHandleChecked()); Handle<Object> argv[] = { key, object }; - bool has_exception = false; - Handle<Object> result = - Execution::Call(isolate_, builtin, object, 2, argv, &has_exception); - if (has_exception) return EXCEPTION; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate_, result, + Execution::Call(isolate_, builtin, object, 2, argv), + EXCEPTION); if (result->IsUndefined()) return UNCHANGED; if (deferred_key) { if (key->IsSmi()) key = factory_->NumberToString(key); @@ -501,8 +484,11 @@ BasicJsonStringifier::Result BasicJsonStringifier::SerializeGeneric( part_length_ = kInitialPartLength; // Allocate conservatively. Extend(); // Attach current part and allocate new part. // Attach result string to the accumulator. 
- Handle<String> cons = factory_->NewConsString(accumulator(), result_string); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate_, cons, EXCEPTION); + Handle<String> cons; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate_, cons, + factory_->NewConsString(accumulator(), result_string), + EXCEPTION); set_accumulator(cons); return SUCCESS; } @@ -510,17 +496,16 @@ BasicJsonStringifier::Result BasicJsonStringifier::SerializeGeneric( BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSValue( Handle<JSValue> object) { - bool has_exception = false; String* class_name = object->class_name(); if (class_name == isolate_->heap()->String_string()) { - Handle<Object> value = - Execution::ToString(isolate_, object, &has_exception); - if (has_exception) return EXCEPTION; + Handle<Object> value; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate_, value, Execution::ToString(isolate_, object), EXCEPTION); SerializeString(Handle<String>::cast(value)); } else if (class_name == isolate_->heap()->Number_string()) { - Handle<Object> value = - Execution::ToNumber(isolate_, object, &has_exception); - if (has_exception) return EXCEPTION; + Handle<Object> value; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate_, value, Execution::ToNumber(isolate_, object), EXCEPTION); if (value->IsSmi()) return SerializeSmi(Smi::cast(*value)); SerializeHeapNumber(Handle<HeapNumber>::cast(value)); } else { @@ -561,22 +546,25 @@ BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSArray( HandleScope handle_scope(isolate_); Result stack_push = StackPush(object); if (stack_push != SUCCESS) return stack_push; - int length = Smi::cast(object->length())->value(); + uint32_t length = 0; + CHECK(object->length()->ToArrayIndex(&length)); Append('['); switch (object->GetElementsKind()) { case FAST_SMI_ELEMENTS: { Handle<FixedArray> elements( FixedArray::cast(object->elements()), isolate_); - for (int i = 0; i < length; i++) { + for (uint32_t i = 0; i < length; i++) { if (i > 0) Append(','); SerializeSmi(Smi::cast(elements->get(i))); } break; } case FAST_DOUBLE_ELEMENTS: { + // Empty array is FixedArray but not FixedDoubleArray. 
+ if (length == 0) break; Handle<FixedDoubleArray> elements( FixedDoubleArray::cast(object->elements()), isolate_); - for (int i = 0; i < length; i++) { + for (uint32_t i = 0; i < length; i++) { if (i > 0) Append(','); SerializeDouble(elements->get_scalar(i)); } @@ -585,7 +573,7 @@ BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSArray( case FAST_ELEMENTS: { Handle<FixedArray> elements( FixedArray::cast(object->elements()), isolate_); - for (int i = 0; i < length; i++) { + for (uint32_t i = 0; i < length; i++) { if (i > 0) Append(','); Result result = SerializeElement(isolate_, @@ -617,11 +605,14 @@ BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSArray( BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSArraySlow( - Handle<JSArray> object, int length) { - for (int i = 0; i < length; i++) { + Handle<JSArray> object, uint32_t length) { + for (uint32_t i = 0; i < length; i++) { if (i > 0) Append(','); - Handle<Object> element = Object::GetElement(isolate_, object, i); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate_, element, EXCEPTION); + Handle<Object> element; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate_, element, + Object::GetElement(isolate_, object, i), + EXCEPTION); if (element->IsUndefined()) { AppendAscii("null"); } else { @@ -671,43 +662,48 @@ BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSObject( map->instance_descriptors()->GetFieldIndex(i)), isolate_); } else { - property = GetProperty(isolate_, object, key); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate_, property, EXCEPTION); + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate_, property, + Object::GetPropertyOrElement(object, key), + EXCEPTION); } Result result = SerializeProperty(property, comma, key); if (!comma && result == SUCCESS) comma = true; - if (result >= EXCEPTION) return result; + if (result == EXCEPTION) return result; } } else { - bool has_exception = false; - Handle<FixedArray> contents = - GetKeysInFixedArrayFor(object, LOCAL_ONLY, &has_exception); - if (has_exception) return EXCEPTION; + Handle<FixedArray> contents; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate_, contents, + JSReceiver::GetKeys(object, JSReceiver::LOCAL_ONLY), + EXCEPTION); for (int i = 0; i < contents->length(); i++) { Object* key = contents->get(i); Handle<String> key_handle; - Handle<Object> property; + MaybeHandle<Object> maybe_property; if (key->IsString()) { key_handle = Handle<String>(String::cast(key), isolate_); - property = GetProperty(isolate_, object, key_handle); + maybe_property = Object::GetPropertyOrElement(object, key_handle); } else { ASSERT(key->IsNumber()); key_handle = factory_->NumberToString(Handle<Object>(key, isolate_)); uint32_t index; if (key->IsSmi()) { - property = Object::GetElement( + maybe_property = Object::GetElement( isolate_, object, Smi::cast(key)->value()); } else if (key_handle->AsArrayIndex(&index)) { - property = Object::GetElement(isolate_, object, index); + maybe_property = Object::GetElement(isolate_, object, index); } else { - property = GetProperty(isolate_, object, key_handle); + maybe_property = Object::GetPropertyOrElement(object, key_handle); } } - RETURN_IF_EMPTY_HANDLE_VALUE(isolate_, property, EXCEPTION); + Handle<Object> property; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate_, property, maybe_property, EXCEPTION); Result result = SerializeProperty(property, comma, key_handle); if (!comma && result == SUCCESS) comma = true; - if (result >= EXCEPTION) return result; + if (result == EXCEPTION) return result; } } @@ -731,7 +727,8 @@ void 
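Two behaviors are visible in the array hunks above: SerializeJSArray now guards the FAST_DOUBLE_ELEMENTS fast path against a zero-length array (whose backing store is the generic empty FixedArray rather than a FixedDoubleArray), and SerializeJSArraySlow keeps the JSON rule that holes and undefined elements serialize as null. The second rule in a self-contained toy, with std::optional<double> standing in for "element or hole":

#include <cstdio>
#include <optional>
#include <string>
#include <vector>

std::string StringifyArray(const std::vector<std::optional<double>>& elements) {
  std::string out = "[";
  for (size_t i = 0; i < elements.size(); ++i) {
    if (i > 0) out += ",";
    if (!elements[i]) {
      out += "null";                        // hole / undefined becomes null
    } else {
      out += std::to_string(*elements[i]);  // real element
    }
  }
  return out + "]";
}

int main() {
  std::printf("%s\n", StringifyArray({1.0, std::nullopt, 3.0}).c_str());
  return 0;
}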
BasicJsonStringifier::Accumulate() { set_accumulator(factory_->empty_string()); overflowed_ = true; } else { - set_accumulator(factory_->NewConsString(accumulator(), current_part_)); + set_accumulator(factory_->NewConsString(accumulator(), + current_part_).ToHandleChecked()); } } @@ -742,9 +739,11 @@ void BasicJsonStringifier::Extend() { part_length_ *= kPartLengthGrowthFactor; } if (is_ascii_) { - current_part_ = factory_->NewRawOneByteString(part_length_); + current_part_ = + factory_->NewRawOneByteString(part_length_).ToHandleChecked(); } else { - current_part_ = factory_->NewRawTwoByteString(part_length_); + current_part_ = + factory_->NewRawTwoByteString(part_length_).ToHandleChecked(); } ASSERT(!current_part_.is_null()); current_index_ = 0; @@ -754,7 +753,8 @@ void BasicJsonStringifier::Extend() { void BasicJsonStringifier::ChangeEncoding() { ShrinkCurrentPart(); Accumulate(); - current_part_ = factory_->NewRawTwoByteString(part_length_); + current_part_ = + factory_->NewRawTwoByteString(part_length_).ToHandleChecked(); ASSERT(!current_part_.is_null()); current_index_ = 0; is_ascii_ = false; @@ -864,7 +864,7 @@ Vector<const uc16> BasicJsonStringifier::GetCharVector(Handle<String> string) { void BasicJsonStringifier::SerializeString(Handle<String> object) { - object = FlattenGetString(object); + object = String::Flatten(object); if (is_ascii_) { if (object->IsOneByteRepresentationUnderneath()) { SerializeString_<true, uint8_t>(object); diff --git a/deps/v8/src/json.js b/deps/v8/src/json.js index fc4b58dec..93e38b0db 100644 --- a/deps/v8/src/json.js +++ b/deps/v8/src/json.js @@ -1,29 +1,6 @@ // Copyright 2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
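Accumulate() and Extend() above implement the stringifier's buffering strategy: characters go into a fixed-size current part, and when the part fills up it is attached to the accumulator (as a cons string in the real code) and a new part of double the size is allocated, up to a cap. The standalone sketch below models only that growth policy, using std::string in place of V8 strings; the class and constants are invented.

#include <cstdio>
#include <string>

class PartBuilder {
 public:
  void Append(char c) {
    if (current_.size() == part_length_) Extend();
    current_.push_back(c);
  }
  std::string Finish() {
    accumulator_ += current_;   // attach whatever is left in the last part
    current_.clear();
    return accumulator_;
  }

 private:
  void Extend() {
    accumulator_ += current_;   // "Accumulate": attach the filled part
    current_.clear();
    if (part_length_ < kMaxPartLength) part_length_ *= 2;  // grow, capped
  }

  static constexpr size_t kInitialPartLength = 4;
  static constexpr size_t kMaxPartLength = 64;
  size_t part_length_ = kInitialPartLength;
  std::string accumulator_;
  std::string current_;
};

int main() {
  PartBuilder builder;
  for (char c = 'a'; c <= 'z'; ++c) builder.Append(c);
  std::printf("%s\n", builder.Finish().c_str());
  return 0;
}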
// This file relies on the fact that the following declarations have been made // in runtime.js: diff --git a/deps/v8/src/jsregexp-inl.h b/deps/v8/src/jsregexp-inl.h index 3ef07d8c5..34e60fa0a 100644 --- a/deps/v8/src/jsregexp-inl.h +++ b/deps/v8/src/jsregexp-inl.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_JSREGEXP_INL_H_ diff --git a/deps/v8/src/jsregexp.cc b/deps/v8/src/jsregexp.cc index a30fc26ff..7284c778c 100644 --- a/deps/v8/src/jsregexp.cc +++ b/deps/v8/src/jsregexp.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -66,14 +43,13 @@ namespace v8 { namespace internal { -Handle<Object> RegExpImpl::CreateRegExpLiteral(Handle<JSFunction> constructor, - Handle<String> pattern, - Handle<String> flags, - bool* has_pending_exception) { +MaybeHandle<Object> RegExpImpl::CreateRegExpLiteral( + Handle<JSFunction> constructor, + Handle<String> pattern, + Handle<String> flags) { // Call the construct code with 2 arguments. Handle<Object> argv[] = { pattern, flags }; - return Execution::New(constructor, ARRAY_SIZE(argv), argv, - has_pending_exception); + return Execution::New(constructor, ARRAY_SIZE(argv), argv); } @@ -96,10 +72,12 @@ static JSRegExp::Flags RegExpFlagsFromString(Handle<String> str) { } -static inline void ThrowRegExpException(Handle<JSRegExp> re, - Handle<String> pattern, - Handle<String> error_text, - const char* message) { +MUST_USE_RESULT +static inline MaybeHandle<Object> ThrowRegExpException( + Handle<JSRegExp> re, + Handle<String> pattern, + Handle<String> error_text, + const char* message) { Isolate* isolate = re->GetIsolate(); Factory* factory = isolate->factory(); Handle<FixedArray> elements = factory->NewFixedArray(2); @@ -107,7 +85,7 @@ static inline void ThrowRegExpException(Handle<JSRegExp> re, elements->set(1, *error_text); Handle<JSArray> array = factory->NewJSArrayWithElements(elements); Handle<Object> regexp_err = factory->NewSyntaxError(message, array); - isolate->Throw(*regexp_err); + return isolate->Throw<Object>(regexp_err); } @@ -168,15 +146,17 @@ static bool HasFewDifferentCharacters(Handle<String> pattern) { // Generic RegExp methods. Dispatches to implementation specific methods. -Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re, - Handle<String> pattern, - Handle<String> flag_str) { +MaybeHandle<Object> RegExpImpl::Compile(Handle<JSRegExp> re, + Handle<String> pattern, + Handle<String> flag_str) { Isolate* isolate = re->GetIsolate(); Zone zone(isolate); JSRegExp::Flags flags = RegExpFlagsFromString(flag_str); CompilationCache* compilation_cache = isolate->compilation_cache(); - Handle<FixedArray> cached = compilation_cache->LookupRegExp(pattern, flags); - bool in_cache = !cached.is_null(); + MaybeHandle<FixedArray> maybe_cached = + compilation_cache->LookupRegExp(pattern, flags); + Handle<FixedArray> cached; + bool in_cache = maybe_cached.ToHandle(&cached); LOG(isolate, RegExpCompileEvent(re, in_cache)); Handle<Object> result; @@ -184,18 +164,17 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re, re->set_data(*cached); return re; } - pattern = FlattenGetString(pattern); + pattern = String::Flatten(pattern); PostponeInterruptsScope postpone(isolate); RegExpCompileData parse_result; FlatStringReader reader(isolate, pattern); if (!RegExpParser::ParseRegExp(&reader, flags.is_multiline(), &parse_result, &zone)) { // Throw an exception if we fail to parse the pattern. 
- ThrowRegExpException(re, - pattern, - parse_result.error, - "malformed_regexp"); - return Handle<Object>::null(); + return ThrowRegExpException(re, + pattern, + parse_result.error, + "malformed_regexp"); } bool has_been_compiled = false; @@ -211,8 +190,11 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re, parse_result.capture_count == 0) { RegExpAtom* atom = parse_result.tree->AsAtom(); Vector<const uc16> atom_pattern = atom->data(); - Handle<String> atom_string = - isolate->factory()->NewStringFromTwoByte(atom_pattern); + Handle<String> atom_string; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, atom_string, + isolate->factory()->NewStringFromTwoByte(atom_pattern), + Object); if (!HasFewDifferentCharacters(atom_string)) { AtomCompile(re, pattern, flags, atom_string); has_been_compiled = true; @@ -231,23 +213,19 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re, } -Handle<Object> RegExpImpl::Exec(Handle<JSRegExp> regexp, - Handle<String> subject, - int index, - Handle<JSArray> last_match_info) { +MaybeHandle<Object> RegExpImpl::Exec(Handle<JSRegExp> regexp, + Handle<String> subject, + int index, + Handle<JSArray> last_match_info) { switch (regexp->TypeTag()) { case JSRegExp::ATOM: return AtomExec(regexp, subject, index, last_match_info); case JSRegExp::IRREGEXP: { - Handle<Object> result = - IrregexpExec(regexp, subject, index, last_match_info); - ASSERT(!result.is_null() || - regexp->GetIsolate()->has_pending_exception()); - return result; + return IrregexpExec(regexp, subject, index, last_match_info); } default: UNREACHABLE(); - return Handle<Object>::null(); + return MaybeHandle<Object>(); } } @@ -290,7 +268,7 @@ int RegExpImpl::AtomExecRaw(Handle<JSRegExp> regexp, ASSERT(0 <= index); ASSERT(index <= subject->length()); - if (!subject->IsFlat()) FlattenString(subject); + subject = String::Flatten(subject); DisallowHeapAllocation no_gc; // ensure vectors stay valid String* needle = String::cast(regexp->DataAt(JSRegExp::kAtomPatternIndex)); @@ -439,7 +417,7 @@ bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, JSRegExp::Flags flags = re->GetFlags(); Handle<String> pattern(re->Pattern()); - if (!pattern->IsFlat()) FlattenString(pattern); + pattern = String::Flatten(pattern); RegExpCompileData compile_data; FlatStringReader reader(isolate, pattern); if (!RegExpParser::ParseRegExp(&reader, flags.is_multiline(), @@ -447,10 +425,10 @@ bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, &zone)) { // Throw an exception if we fail to parse the pattern. // THIS SHOULD NOT HAPPEN. We already pre-parsed it successfully once. - ThrowRegExpException(re, - pattern, - compile_data.error, - "malformed_regexp"); + USE(ThrowRegExpException(re, + pattern, + compile_data.error, + "malformed_regexp")); return false; } RegExpEngine::CompilationResult result = @@ -464,9 +442,8 @@ bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, &zone); if (result.error_message != NULL) { // Unable to compile regexp. 
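RegExpImpl::Compile above first consults the compilation cache keyed by the (pattern, flags) pair and, on a hit, reuses the cached data; only on a miss does it flatten the pattern and run the parser. Below is a minimal standalone model of that lookup-then-compile flow; the cache type, key shape, and "compilation" here are invented for the sketch and are not V8's CompilationCache.

#include <cstdio>
#include <map>
#include <string>
#include <utility>

struct CompiledRegExp {
  std::string pattern;
  bool multiline;
};

class RegExpCache {
 public:
  const CompiledRegExp* Lookup(const std::string& pattern, bool multiline) {
    auto it = cache_.find({pattern, multiline});
    return it == cache_.end() ? nullptr : &it->second;
  }
  const CompiledRegExp* Insert(const std::string& pattern, bool multiline) {
    CompiledRegExp compiled{pattern, multiline};   // stand-in for compilation
    return &(cache_[{pattern, multiline}] = compiled);
  }
 private:
  std::map<std::pair<std::string, bool>, CompiledRegExp> cache_;
};

const CompiledRegExp* Compile(RegExpCache* cache,
                              const std::string& pattern, bool multiline) {
  if (const CompiledRegExp* hit = cache->Lookup(pattern, multiline)) {
    std::printf("cache hit for /%s/\n", pattern.c_str());
    return hit;
  }
  std::printf("compiling /%s/\n", pattern.c_str());
  return cache->Insert(pattern, multiline);
}

int main() {
  RegExpCache cache;
  Compile(&cache, "a+b", false);
  Compile(&cache, "a+b", false);  // second call hits the cache
  return 0;
}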
- Handle<String> error_message = - isolate->factory()->NewStringFromUtf8(CStrVector(result.error_message)); - ASSERT(!error_message.is_null()); + Handle<String> error_message = isolate->factory()->NewStringFromUtf8( + CStrVector(result.error_message)).ToHandleChecked(); CreateRegExpErrorObjectAndThrow(re, is_ascii, error_message, isolate); return false; } @@ -528,7 +505,7 @@ void RegExpImpl::IrregexpInitialize(Handle<JSRegExp> re, int RegExpImpl::IrregexpPrepare(Handle<JSRegExp> regexp, Handle<String> subject) { - if (!subject->IsFlat()) FlattenString(subject); + subject = String::Flatten(subject); // Check the asciiness of the underlying storage. bool is_ascii = subject->IsOneByteRepresentationUnderneath(); @@ -637,10 +614,10 @@ int RegExpImpl::IrregexpExecRaw(Handle<JSRegExp> regexp, } -Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> regexp, - Handle<String> subject, - int previous_index, - Handle<JSArray> last_match_info) { +MaybeHandle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> regexp, + Handle<String> subject, + int previous_index, + Handle<JSArray> last_match_info) { Isolate* isolate = regexp->GetIsolate(); ASSERT_EQ(regexp->TypeTag(), JSRegExp::IRREGEXP); @@ -656,7 +633,7 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> regexp, if (required_registers < 0) { // Compiling failed with an exception. ASSERT(isolate->has_pending_exception()); - return Handle<Object>::null(); + return MaybeHandle<Object>(); } int32_t* output_registers = NULL; @@ -678,7 +655,7 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> regexp, } if (res == RE_EXCEPTION) { ASSERT(isolate->has_pending_exception()); - return Handle<Object>::null(); + return MaybeHandle<Object>(); } ASSERT(res == RE_FAILURE); return isolate->factory()->null_value(); @@ -6015,7 +5992,7 @@ RegExpEngine::CompilationResult RegExpEngine::Compile( // Sample some characters from the middle of the string. static const int kSampleSize = 128; - FlattenString(sample_subject); + sample_subject = String::Flatten(sample_subject); int chars_sampled = 0; int half_way = (sample_subject->length() - kSampleSize) / 2; for (int i = Max(0, half_way); diff --git a/deps/v8/src/jsregexp.h b/deps/v8/src/jsregexp.h index dfd415d5a..5366d6e13 100644 --- a/deps/v8/src/jsregexp.h +++ b/deps/v8/src/jsregexp.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_JSREGEXP_H_ #define V8_JSREGEXP_H_ @@ -55,10 +32,10 @@ class RegExpImpl { // Creates a regular expression literal in the old space. // This function calls the garbage collector if necessary. - static Handle<Object> CreateRegExpLiteral(Handle<JSFunction> constructor, - Handle<String> pattern, - Handle<String> flags, - bool* has_pending_exception); + MUST_USE_RESULT static MaybeHandle<Object> CreateRegExpLiteral( + Handle<JSFunction> constructor, + Handle<String> pattern, + Handle<String> flags); // Returns a string representation of a regular expression. // Implements RegExp.prototype.toString, see ECMA-262 section 15.10.6.4. @@ -69,16 +46,18 @@ class RegExpImpl { // generic data and choice of implementation - as well as what // the implementation wants to store in the data field. // Returns false if compilation fails. - static Handle<Object> Compile(Handle<JSRegExp> re, - Handle<String> pattern, - Handle<String> flags); + MUST_USE_RESULT static MaybeHandle<Object> Compile( + Handle<JSRegExp> re, + Handle<String> pattern, + Handle<String> flags); // See ECMA-262 section 15.10.6.2. // This function calls the garbage collector if necessary. - static Handle<Object> Exec(Handle<JSRegExp> regexp, - Handle<String> subject, - int index, - Handle<JSArray> lastMatchInfo); + MUST_USE_RESULT static MaybeHandle<Object> Exec( + Handle<JSRegExp> regexp, + Handle<String> subject, + int index, + Handle<JSArray> lastMatchInfo); // Prepares a JSRegExp object with Irregexp-specific data. static void IrregexpInitialize(Handle<JSRegExp> re, @@ -133,10 +112,11 @@ class RegExpImpl { // On a successful match, the result is a JSArray containing // captured positions. On a failure, the result is the null value. // Returns an empty handle in case of an exception. - static Handle<Object> IrregexpExec(Handle<JSRegExp> regexp, - Handle<String> subject, - int index, - Handle<JSArray> lastMatchInfo); + MUST_USE_RESULT static MaybeHandle<Object> IrregexpExec( + Handle<JSRegExp> regexp, + Handle<String> subject, + int index, + Handle<JSArray> lastMatchInfo); // Set last match info. If match is NULL, then setting captures is omitted. static Handle<JSArray> SetLastMatchInfo(Handle<JSArray> last_match_info, diff --git a/deps/v8/src/lazy-instance.h b/deps/v8/src/lazy-instance.h index fc03f4d12..b760f1fb6 100644 --- a/deps/v8/src/lazy-instance.h +++ b/deps/v8/src/lazy-instance.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // The LazyInstance<Type, Traits> class manages a single instance of Type, // which will be lazily created on the first time it's accessed. This class is @@ -66,8 +43,8 @@ // LAZY_INSTANCE_INITIALIZER; // // WARNINGS: -// - This implementation of LazyInstance is NOT THREAD-SAFE by default. See -// ThreadSafeInitOnceTrait declared below for that. +// - This implementation of LazyInstance IS THREAD-SAFE by default. See +// SingleThreadInitOnceTrait if you don't care about thread safety. // - Lazy initialization comes with a cost. Make sure that you don't use it on // critical path. Consider adding your initialization code to a function // which is explicitly called once. @@ -227,7 +204,7 @@ struct LazyInstanceImpl { template <typename T, typename CreateTrait = DefaultConstructTrait<T>, - typename InitOnceTrait = SingleThreadInitOnceTrait, + typename InitOnceTrait = ThreadSafeInitOnceTrait, typename DestroyTrait = LeakyInstanceTrait<T> > struct LazyStaticInstance { typedef LazyInstanceImpl<T, StaticallyAllocatedInstanceTrait<T>, @@ -237,7 +214,7 @@ struct LazyStaticInstance { template <typename T, typename CreateTrait = DefaultConstructTrait<T>, - typename InitOnceTrait = SingleThreadInitOnceTrait, + typename InitOnceTrait = ThreadSafeInitOnceTrait, typename DestroyTrait = LeakyInstanceTrait<T> > struct LazyInstance { // A LazyInstance is a LazyStaticInstance. @@ -248,7 +225,7 @@ struct LazyInstance { template <typename T, typename CreateTrait = DefaultCreateTrait<T>, - typename InitOnceTrait = SingleThreadInitOnceTrait, + typename InitOnceTrait = ThreadSafeInitOnceTrait, typename DestroyTrait = LeakyInstanceTrait<T> > struct LazyDynamicInstance { typedef LazyInstanceImpl<T, DynamicallyAllocatedInstanceTrait<T>, diff --git a/deps/v8/src/libplatform/default-platform.cc b/deps/v8/src/libplatform/default-platform.cc index 1e21ca4e8..6ff8830fb 100644 --- a/deps/v8/src/libplatform/default-platform.cc +++ b/deps/v8/src/libplatform/default-platform.cc @@ -1,32 +1,10 @@ // Copyright 2013 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "default-platform.h" +#include <algorithm> #include <queue> // TODO(jochen): We should have our own version of checks.h. @@ -39,6 +17,9 @@ namespace v8 { namespace internal { +const int DefaultPlatform::kMaxThreadPoolSize = 4; + + DefaultPlatform::DefaultPlatform() : initialized_(false), thread_pool_size_(0) {} @@ -60,7 +41,8 @@ void DefaultPlatform::SetThreadPoolSize(int thread_pool_size) { ASSERT(thread_pool_size >= 0); if (thread_pool_size < 1) thread_pool_size = CPU::NumberOfProcessorsOnline(); - thread_pool_size_ = Max(Min(thread_pool_size, kMaxThreadPoolSize), 1); + thread_pool_size_ = + std::max(std::min(thread_pool_size, kMaxThreadPoolSize), 1); } diff --git a/deps/v8/src/libplatform/default-platform.h b/deps/v8/src/libplatform/default-platform.h index 5c4883234..f887eb32e 100644 --- a/deps/v8/src/libplatform/default-platform.h +++ b/deps/v8/src/libplatform/default-platform.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LIBPLATFORM_DEFAULT_PLATFORM_H_ #define V8_LIBPLATFORM_DEFAULT_PLATFORM_H_ @@ -31,8 +8,7 @@ #include <vector> #include "../../include/v8-platform.h" -// TODO(jochen): We should have our own version of globals.h. -#include "../globals.h" +#include "../base/macros.h" #include "../platform/mutex.h" #include "task-queue.h" @@ -59,7 +35,7 @@ class DefaultPlatform : public Platform { Task *task) V8_OVERRIDE; private: - static const int kMaxThreadPoolSize = 4; + static const int kMaxThreadPoolSize; Mutex lock_; bool initialized_; diff --git a/deps/v8/src/libplatform/task-queue.cc b/deps/v8/src/libplatform/task-queue.cc index 1ea31eb26..37cf1353e 100644 --- a/deps/v8/src/libplatform/task-queue.cc +++ b/deps/v8/src/libplatform/task-queue.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
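Note on the jsregexp changes earlier in this diff: RegExpImpl::Compile, Exec and IrregexpExec now return MUST_USE_RESULT MaybeHandle<Object> instead of a possibly-null Handle<Object>, and exceptional paths return the empty MaybeHandle. A minimal caller-side sketch of the pattern this implies (HypotheticalExecCaller is an illustrative function, not part of this commit; it assumes the usual V8-internal headers):

MaybeHandle<Object> HypotheticalExecCaller(Handle<JSRegExp> regexp,
                                           Handle<String> subject,
                                           Handle<JSArray> last_match_info) {
  Handle<Object> result;
  if (!RegExpImpl::Exec(regexp, subject, 0, last_match_info)
           .ToHandle(&result)) {
    // Exec threw: the pending exception is already set on the isolate,
    // so the empty MaybeHandle is simply propagated.
    return MaybeHandle<Object>();
  }
  // result is either a JSArray of captures or the null value (no match).
  return result;
}

Where failure is impossible by construction, callers use ToHandleChecked() instead, as the NewStringFromUtf8 call above now does.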
#include "task-queue.h" diff --git a/deps/v8/src/libplatform/task-queue.h b/deps/v8/src/libplatform/task-queue.h index a3182d353..8b9137b03 100644 --- a/deps/v8/src/libplatform/task-queue.h +++ b/deps/v8/src/libplatform/task-queue.h @@ -1,37 +1,13 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LIBPLATFORM_TASK_QUEUE_H_ #define V8_LIBPLATFORM_TASK_QUEUE_H_ #include <queue> -// TODO(jochen): We should have our own version of globals.h. -#include "../globals.h" +#include "../base/macros.h" #include "../platform/mutex.h" #include "../platform/semaphore.h" diff --git a/deps/v8/src/libplatform/worker-thread.cc b/deps/v8/src/libplatform/worker-thread.cc index cca8a9719..e7d8ec763 100644 --- a/deps/v8/src/libplatform/worker-thread.cc +++ b/deps/v8/src/libplatform/worker-thread.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "worker-thread.h" diff --git a/deps/v8/src/libplatform/worker-thread.h b/deps/v8/src/libplatform/worker-thread.h index f0b9019f5..b9d7fdabe 100644 --- a/deps/v8/src/libplatform/worker-thread.h +++ b/deps/v8/src/libplatform/worker-thread.h @@ -1,37 +1,13 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LIBPLATFORM_WORKER_THREAD_H_ #define V8_LIBPLATFORM_WORKER_THREAD_H_ #include <queue> -// TODO(jochen): We should have our own version of globals.h. -#include "../globals.h" +#include "../base/macros.h" #include "../platform.h" namespace v8 { diff --git a/deps/v8/src/list-inl.h b/deps/v8/src/list-inl.h index a80aa6743..4a18d9820 100644 --- a/deps/v8/src/list-inl.h +++ b/deps/v8/src/list-inl.h @@ -1,29 +1,6 @@ // Copyright 2006-2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LIST_INL_H_ #define V8_LIST_INL_H_ diff --git a/deps/v8/src/list.h b/deps/v8/src/list.h index 71aa82195..1029f493f 100644 --- a/deps/v8/src/list.h +++ b/deps/v8/src/list.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LIST_H_ #define V8_LIST_H_ diff --git a/deps/v8/src/lithium-allocator-inl.h b/deps/v8/src/lithium-allocator-inl.h index 7c0cba7fb..1b9de0eed 100644 --- a/deps/v8/src/lithium-allocator-inl.h +++ b/deps/v8/src/lithium-allocator-inl.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LITHIUM_ALLOCATOR_INL_H_ #define V8_LITHIUM_ALLOCATOR_INL_H_ diff --git a/deps/v8/src/lithium-allocator.cc b/deps/v8/src/lithium-allocator.cc index 9987161d4..c6e52ed82 100644 --- a/deps/v8/src/lithium-allocator.cc +++ b/deps/v8/src/lithium-allocator.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" #include "lithium-allocator-inl.h" @@ -69,7 +46,8 @@ UsePosition::UsePosition(LifetimePosition pos, register_beneficial_(true) { if (operand_ != NULL && operand_->IsUnallocated()) { LUnallocated* unalloc = LUnallocated::cast(operand_); - requires_reg_ = unalloc->HasRegisterPolicy(); + requires_reg_ = unalloc->HasRegisterPolicy() || + unalloc->HasDoubleRegisterPolicy(); register_beneficial_ = !unalloc->HasAnyPolicy(); } ASSERT(pos_.IsValid()); @@ -986,7 +964,7 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) { } } - if (instr->ClobbersDoubleRegisters()) { + if (instr->ClobbersDoubleRegisters(isolate())) { for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) { if (output == NULL || !output->IsDoubleRegister() || output->index() != i) { @@ -1028,6 +1006,15 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) { } Use(block_start_position, curr_position.InstructionEnd(), temp, NULL); Define(curr_position, temp, NULL); + + if (temp->IsUnallocated()) { + LUnallocated* temp_unalloc = LUnallocated::cast(temp); + if (temp_unalloc->HasDoubleRegisterPolicy()) { + double_artificial_registers_.Add( + temp_unalloc->virtual_register() - first_artificial_register_, + zone()); + } + } } } } @@ -1118,7 +1105,6 @@ bool LAllocator::Allocate(LChunk* chunk) { void LAllocator::MeetRegisterConstraints() { LAllocatorPhase phase("L_Register constraints", this); - first_artificial_register_ = next_virtual_register_; const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); for (int i = 0; i < blocks->length(); ++i) { HBasicBlock* block = blocks->at(i); diff --git a/deps/v8/src/lithium-allocator.h b/deps/v8/src/lithium-allocator.h index 8a1476a04..83ba9afb6 100644 --- a/deps/v8/src/lithium-allocator.h +++ b/deps/v8/src/lithium-allocator.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
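Note on the lithium-allocator.cc changes above: a use position now also requires a register when its operand carries the new MUST_HAVE_DOUBLE_REGISTER policy (the policy itself and HasDoubleRegisterPolicy() are added in the lithium.h hunk further down), and such unallocated temps are recorded in double_artificial_registers_. A small sketch of how such an operand would be constructed and queried (hypothetical builder code; zone() stands in for whatever Zone the caller has, and is not part of this commit):

LUnallocated* temp =
    new(zone()) LUnallocated(LUnallocated::MUST_HAVE_DOUBLE_REGISTER);
ASSERT(temp->HasDoubleRegisterPolicy());  // new predicate consulted by the allocator
ASSERT(!temp->HasRegisterPolicy());       // distinct from the general-register policy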
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LITHIUM_ALLOCATOR_H_ #define V8_LITHIUM_ALLOCATOR_H_ diff --git a/deps/v8/src/lithium-codegen.cc b/deps/v8/src/lithium-codegen.cc index be0ff8371..0d841b7e8 100644 --- a/deps/v8/src/lithium-codegen.cc +++ b/deps/v8/src/lithium-codegen.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -122,6 +99,30 @@ bool LCodeGenBase::GenerateBody() { } +void LCodeGenBase::CheckEnvironmentUsage() { +#ifdef DEBUG + bool dead_block = false; + for (int i = 0; i < instructions_->length(); i++) { + LInstruction* instr = instructions_->at(i); + HValue* hval = instr->hydrogen_value(); + if (instr->IsLabel()) dead_block = LLabel::cast(instr)->HasReplacement(); + if (dead_block || !hval->block()->IsReachable()) continue; + + HInstruction* hinstr = HInstruction::cast(hval); + if (!hinstr->CanDeoptimize() && instr->HasEnvironment()) { + V8_Fatal(__FILE__, __LINE__, "CanDeoptimize is wrong for %s (%s)\n", + hinstr->Mnemonic(), instr->Mnemonic()); + } + + if (instr->HasEnvironment() && !instr->environment()->has_been_used()) { + V8_Fatal(__FILE__, __LINE__, "unused environment for %s (%s)\n", + hinstr->Mnemonic(), instr->Mnemonic()); + } + } +#endif +} + + void LCodeGenBase::Comment(const char* format, ...) 
{ if (!FLAG_code_comments) return; char buffer[4 * KB]; @@ -149,6 +150,17 @@ int LCodeGenBase::GetNextEmittedBlock() const { } +static void AddWeakObjectToCodeDependency(Isolate* isolate, + Handle<Object> object, + Handle<Code> code) { + Heap* heap = isolate->heap(); + heap->EnsureWeakObjectToCodeTable(); + Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object)); + dep = DependentCode::Insert(dep, DependentCode::kWeakCodeGroup, code); + heap->AddWeakObjectToCodeDependency(object, dep); +} + + void LCodeGenBase::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) { ASSERT(code->is_optimized_code()); ZoneList<Handle<Map> > maps(1, zone()); @@ -176,6 +188,10 @@ void LCodeGenBase::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) { } } } + if (FLAG_enable_ool_constant_pool) { + code->constant_pool()->set_weak_object_state( + ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE); + } #ifdef VERIFY_HEAP // This disables verification of weak embedded objects after full GC. // AddDependentCode can cause a GC, which would observe the state where @@ -183,15 +199,32 @@ void LCodeGenBase::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) { NoWeakObjectVerificationScope disable_verification_of_embedded_objects; #endif for (int i = 0; i < maps.length(); i++) { - maps.at(i)->AddDependentCode(DependentCode::kWeaklyEmbeddedGroup, code); + Map::AddDependentCode(maps.at(i), DependentCode::kWeakCodeGroup, code); } for (int i = 0; i < objects.length(); i++) { - AddWeakObjectToCodeDependency(isolate()->heap(), objects.at(i), code); + AddWeakObjectToCodeDependency(isolate(), objects.at(i), code); } for (int i = 0; i < cells.length(); i++) { - AddWeakObjectToCodeDependency(isolate()->heap(), cells.at(i), code); + AddWeakObjectToCodeDependency(isolate(), cells.at(i), code); } } +void LCodeGenBase::Abort(BailoutReason reason) { + info()->set_bailout_reason(reason); + status_ = ABORTED; +} + + +void LCodeGenBase::AddDeprecationDependency(Handle<Map> map) { + if (map->is_deprecated()) return Abort(kMapBecameDeprecated); + chunk_->AddDeprecationDependency(map); +} + + +void LCodeGenBase::AddStabilityDependency(Handle<Map> map) { + if (!map->is_stable()) return Abort(kMapBecameUnstable); + chunk_->AddStabilityDependency(map); +} + } } // namespace v8::internal diff --git a/deps/v8/src/lithium-codegen.h b/deps/v8/src/lithium-codegen.h index 3e8d471ea..28a5ab16e 100644 --- a/deps/v8/src/lithium-codegen.h +++ b/deps/v8/src/lithium-codegen.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LITHIUM_CODEGEN_H_ #define V8_LITHIUM_CODEGEN_H_ @@ -68,6 +45,11 @@ class LCodeGenBase BASE_EMBEDDED { void RegisterWeakObjectsInOptimizedCode(Handle<Code> code); + // Check that an environment assigned via AssignEnvironment is actually being + // used. Redundant assignments keep things alive longer than necessary, and + // consequently lead to worse code, so it's important to minimize this. + void CheckEnvironmentUsage(); + protected: enum Status { UNUSED, @@ -90,6 +72,12 @@ class LCodeGenBase BASE_EMBEDDED { bool is_generating() const { return status_ == GENERATING; } bool is_done() const { return status_ == DONE; } bool is_aborted() const { return status_ == ABORTED; } + + void Abort(BailoutReason reason); + + // Methods for code dependencies. + void AddDeprecationDependency(Handle<Map> map); + void AddStabilityDependency(Handle<Map> map); }; diff --git a/deps/v8/src/lithium.cc b/deps/v8/src/lithium.cc index 8753ff14a..2265353f4 100644 --- a/deps/v8/src/lithium.cc +++ b/deps/v8/src/lithium.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
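Note on the lithium-codegen changes above: LCodeGenBase gains Abort(), AddDeprecationDependency() and AddStabilityDependency(), which record maps in the new LChunk map sets; the dependencies are installed on the maps by LChunk::CommitDependencies() after code generation succeeds (see the lithium.cc and lithium.h hunks below). A sketch of the intended use from a hypothetical LCodeGenBase subclass (not code from this commit):

void HypotheticalLCodeGen::DependOnMap(Handle<Map> map) {
  // Record that the generated code must be discarded if |map| is deprecated
  // or later transitions, instead of emitting a runtime map check.
  AddDeprecationDependency(map);  // Abort()s compilation if already deprecated
  if (map->is_stable()) {
    AddStabilityDependency(map);  // would Abort() if the map were not stable
  }
}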
#include "v8.h" #include "lithium.h" @@ -85,6 +62,9 @@ void LOperand::PrintTo(StringStream* stream) { case LUnallocated::MUST_HAVE_REGISTER: stream->Add("(R)"); break; + case LUnallocated::MUST_HAVE_DOUBLE_REGISTER: + stream->Add("(D)"); + break; case LUnallocated::WRITABLE_REGISTER: stream->Add("(WR)"); break; @@ -256,7 +236,9 @@ LChunk::LChunk(CompilationInfo* info, HGraph* graph) graph_(graph), instructions_(32, graph->zone()), pointer_maps_(8, graph->zone()), - inlined_closures_(1, graph->zone()) { + inlined_closures_(1, graph->zone()), + deprecation_dependencies_(MapLess(), MapAllocator(graph->zone())), + stability_dependencies_(MapLess(), MapAllocator(graph->zone())) { } @@ -395,6 +377,27 @@ Representation LChunk::LookupLiteralRepresentation( } +void LChunk::CommitDependencies(Handle<Code> code) const { + for (MapSet::const_iterator it = deprecation_dependencies_.begin(), + iend = deprecation_dependencies_.end(); it != iend; ++it) { + Handle<Map> map = *it; + ASSERT(!map->is_deprecated()); + ASSERT(map->CanBeDeprecated()); + Map::AddDependentCode(map, DependentCode::kTransitionGroup, code); + } + + for (MapSet::const_iterator it = stability_dependencies_.begin(), + iend = stability_dependencies_.end(); it != iend; ++it) { + Handle<Map> map = *it; + ASSERT(map->is_stable()); + ASSERT(map->CanTransition()); + Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code); + } + + info_->CommitDependencies(code); +} + + LChunk* LChunk::NewChunk(HGraph* graph) { DisallowHandleAllocation no_handles; DisallowHeapAllocation no_gc; @@ -432,11 +435,13 @@ Handle<Code> LChunk::Codegen() { MarkEmptyBlocks(); if (generator.GenerateCode()) { + generator.CheckEnvironmentUsage(); CodeGenerator::MakeCodePrologue(info(), "optimized"); Code::Flags flags = info()->flags(); Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&assembler, flags, info()); generator.FinishCode(code); + CommitDependencies(code); code->set_is_crankshafted(true); void* jit_handler_data = assembler.positions_recorder()->DetachJITHandlerData(); diff --git a/deps/v8/src/lithium.h b/deps/v8/src/lithium.h index 8ae5b879d..650bae692 100644 --- a/deps/v8/src/lithium.h +++ b/deps/v8/src/lithium.h @@ -1,36 +1,16 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LITHIUM_H_ #define V8_LITHIUM_H_ +#include <set> + #include "allocation.h" #include "hydrogen.h" #include "safepoint-table.h" +#include "zone-allocator.h" namespace v8 { namespace internal { @@ -101,6 +81,7 @@ class LUnallocated : public LOperand { FIXED_REGISTER, FIXED_DOUBLE_REGISTER, MUST_HAVE_REGISTER, + MUST_HAVE_DOUBLE_REGISTER, WRITABLE_REGISTER, SAME_AS_FIRST_INPUT }; @@ -210,6 +191,10 @@ class LUnallocated : public LOperand { extended_policy() == WRITABLE_REGISTER || extended_policy() == MUST_HAVE_REGISTER); } + bool HasDoubleRegisterPolicy() const { + return basic_policy() == EXTENDED_POLICY && + extended_policy() == MUST_HAVE_DOUBLE_REGISTER; + } bool HasSameAsInputPolicy() const { return basic_policy() == EXTENDED_POLICY && extended_policy() == SAME_AS_FIRST_INPUT; @@ -426,7 +411,8 @@ class LEnvironment V8_FINAL : public ZoneObject { object_mapping_(0, zone), outer_(outer), entry_(entry), - zone_(zone) { } + zone_(zone), + has_been_used_(false) { } Handle<JSFunction> closure() const { return closure_; } FrameType frame_type() const { return frame_type_; } @@ -442,6 +428,9 @@ class LEnvironment V8_FINAL : public ZoneObject { HEnterInlined* entry() { return entry_; } Zone* zone() const { return zone_; } + bool has_been_used() const { return has_been_used_; } + void set_has_been_used() { has_been_used_ = true; } + void AddValue(LOperand* operand, Representation representation, bool is_uint32) { @@ -541,6 +530,7 @@ class LEnvironment V8_FINAL : public ZoneObject { LEnvironment* outer_; HEnterInlined* entry_; Zone* zone_; + bool has_been_used_; }; @@ -660,6 +650,20 @@ class LChunk : public ZoneObject { inlined_closures_.Add(closure, zone()); } + void AddDeprecationDependency(Handle<Map> map) { + ASSERT(!map->is_deprecated()); + if (!map->CanBeDeprecated()) return; + ASSERT(!info_->IsStub()); + deprecation_dependencies_.insert(map); + } + + void AddStabilityDependency(Handle<Map> map) { + ASSERT(map->is_stable()); + if (!map->CanTransition()) return; + ASSERT(!info_->IsStub()); + stability_dependencies_.insert(map); + } + Zone* zone() const { return info_->zone(); } Handle<Code> Codegen(); @@ -675,12 +679,20 @@ class LChunk : public ZoneObject { int spill_slot_count_; private: + typedef std::less<Handle<Map> > MapLess; + typedef zone_allocator<Handle<Map> > MapAllocator; + typedef std::set<Handle<Map>, MapLess, MapAllocator> MapSet; + + void CommitDependencies(Handle<Code> code) const; + CompilationInfo* info_; HGraph* const graph_; BitVector* allocated_double_registers_; ZoneList<LInstruction*> instructions_; ZoneList<LPointerMap*> pointer_maps_; ZoneList<Handle<JSFunction> > inlined_closures_; + MapSet deprecation_dependencies_; + MapSet stability_dependencies_; }; diff --git a/deps/v8/src/liveedit-debugger.js b/deps/v8/src/liveedit-debugger.js index 4618eda36..021a4f052 100644 --- a/deps/v8/src/liveedit-debugger.js +++ 
b/deps/v8/src/liveedit-debugger.js @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // LiveEdit feature implementation. The script should be executed after // debug-debugger.js. diff --git a/deps/v8/src/liveedit.cc b/deps/v8/src/liveedit.cc index 5eae1073a..e6bb4b29a 100644 --- a/deps/v8/src/liveedit.cc +++ b/deps/v8/src/liveedit.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -45,20 +22,13 @@ namespace v8 { namespace internal { - -#ifdef ENABLE_DEBUGGER_SUPPORT - - void SetElementSloppy(Handle<JSObject> object, uint32_t index, Handle<Object> value) { // Ignore return value from SetElement. It can only be a failure if there // are element setters causing exceptions and the debugger context has none // of these. - Handle<Object> no_failure = - JSObject::SetElement(object, index, value, NONE, SLOPPY); - ASSERT(!no_failure.is_null()); - USE(no_failure); + JSObject::SetElement(object, index, value, NONE, SLOPPY).Assert(); } @@ -434,7 +404,7 @@ class TokensCompareOutput : public Comparator::Output { class LineEndsWrapper { public: explicit LineEndsWrapper(Handle<String> string) - : ends_array_(CalculateLineEnds(string, false)), + : ends_array_(String::CalculateLineEnds(string, false)), string_len_(string->length()) { } int length() { @@ -585,8 +555,8 @@ class TokenizingLineArrayCompareOutput : public SubrangableOutput { Handle<JSArray> LiveEdit::CompareStrings(Handle<String> s1, Handle<String> s2) { - s1 = FlattenGetString(s1); - s2 = FlattenGetString(s2); + s1 = String::Flatten(s1); + s2 = String::Flatten(s2); LineEndsWrapper line_ends1(s1); LineEndsWrapper line_ends2(s2); @@ -635,168 +605,94 @@ static int GetArrayLength(Handle<JSArray> array) { } -// Simple helper class that creates more or less typed structures over -// JSArray object. This is an adhoc method of passing structures from C++ -// to JavaScript. 
-template<typename S> -class JSArrayBasedStruct { - public: - static S Create(Isolate* isolate) { - Factory* factory = isolate->factory(); - Handle<JSArray> array = factory->NewJSArray(S::kSize_); - return S(array); - } - static S cast(Object* object) { - JSArray* array = JSArray::cast(object); - Handle<JSArray> array_handle(array); - return S(array_handle); - } - explicit JSArrayBasedStruct(Handle<JSArray> array) : array_(array) { - } - Handle<JSArray> GetJSArray() { - return array_; - } - Isolate* isolate() const { - return array_->GetIsolate(); - } +void FunctionInfoWrapper::SetInitialProperties(Handle<String> name, + int start_position, + int end_position, + int param_num, + int literal_count, + int slot_count, + int parent_index) { + HandleScope scope(isolate()); + this->SetField(kFunctionNameOffset_, name); + this->SetSmiValueField(kStartPositionOffset_, start_position); + this->SetSmiValueField(kEndPositionOffset_, end_position); + this->SetSmiValueField(kParamNumOffset_, param_num); + this->SetSmiValueField(kLiteralNumOffset_, literal_count); + this->SetSmiValueField(kSlotNumOffset_, slot_count); + this->SetSmiValueField(kParentIndexOffset_, parent_index); +} - protected: - void SetField(int field_position, Handle<Object> value) { - SetElementSloppy(array_, field_position, value); - } - void SetSmiValueField(int field_position, int value) { - SetElementSloppy(array_, - field_position, - Handle<Smi>(Smi::FromInt(value), isolate())); - } - Handle<Object> GetField(int field_position) { - return Object::GetElementNoExceptionThrown( - isolate(), array_, field_position); - } - int GetSmiValueField(int field_position) { - Handle<Object> res = GetField(field_position); - return Handle<Smi>::cast(res)->value(); - } - private: - Handle<JSArray> array_; -}; +void FunctionInfoWrapper::SetFunctionCode(Handle<Code> function_code, + Handle<HeapObject> code_scope_info) { + Handle<JSValue> code_wrapper = WrapInJSValue(function_code); + this->SetField(kCodeOffset_, code_wrapper); + Handle<JSValue> scope_wrapper = WrapInJSValue(code_scope_info); + this->SetField(kCodeScopeInfoOffset_, scope_wrapper); +} -// Represents some function compilation details. This structure will be used -// from JavaScript. It contains Code object, which is kept wrapped -// into a BlindReference for sanitizing reasons. 
-class FunctionInfoWrapper : public JSArrayBasedStruct<FunctionInfoWrapper> { - public: - explicit FunctionInfoWrapper(Handle<JSArray> array) - : JSArrayBasedStruct<FunctionInfoWrapper>(array) { - } - void SetInitialProperties(Handle<String> name, int start_position, - int end_position, int param_num, - int literal_count, int parent_index) { - HandleScope scope(isolate()); - this->SetField(kFunctionNameOffset_, name); - this->SetSmiValueField(kStartPositionOffset_, start_position); - this->SetSmiValueField(kEndPositionOffset_, end_position); - this->SetSmiValueField(kParamNumOffset_, param_num); - this->SetSmiValueField(kLiteralNumOffset_, literal_count); - this->SetSmiValueField(kParentIndexOffset_, parent_index); - } - void SetFunctionCode(Handle<Code> function_code, - Handle<HeapObject> code_scope_info) { - Handle<JSValue> code_wrapper = WrapInJSValue(function_code); - this->SetField(kCodeOffset_, code_wrapper); - Handle<JSValue> scope_wrapper = WrapInJSValue(code_scope_info); - this->SetField(kCodeScopeInfoOffset_, scope_wrapper); - } - void SetFunctionScopeInfo(Handle<Object> scope_info_array) { - this->SetField(kFunctionScopeInfoOffset_, scope_info_array); - } - void SetSharedFunctionInfo(Handle<SharedFunctionInfo> info) { - Handle<JSValue> info_holder = WrapInJSValue(info); - this->SetField(kSharedFunctionInfoOffset_, info_holder); - } - int GetLiteralCount() { - return this->GetSmiValueField(kLiteralNumOffset_); - } - int GetParentIndex() { - return this->GetSmiValueField(kParentIndexOffset_); - } - Handle<Code> GetFunctionCode() { - Handle<Object> element = this->GetField(kCodeOffset_); +void FunctionInfoWrapper::SetSharedFunctionInfo( + Handle<SharedFunctionInfo> info) { + Handle<JSValue> info_holder = WrapInJSValue(info); + this->SetField(kSharedFunctionInfoOffset_, info_holder); +} + + +Handle<Code> FunctionInfoWrapper::GetFunctionCode() { + Handle<Object> element = this->GetField(kCodeOffset_); + Handle<JSValue> value_wrapper = Handle<JSValue>::cast(element); + Handle<Object> raw_result = UnwrapJSValue(value_wrapper); + CHECK(raw_result->IsCode()); + return Handle<Code>::cast(raw_result); +} + + +Handle<FixedArray> FunctionInfoWrapper::GetFeedbackVector() { + Handle<Object> element = this->GetField(kSharedFunctionInfoOffset_); + Handle<FixedArray> result; + if (element->IsJSValue()) { Handle<JSValue> value_wrapper = Handle<JSValue>::cast(element); Handle<Object> raw_result = UnwrapJSValue(value_wrapper); - CHECK(raw_result->IsCode()); - return Handle<Code>::cast(raw_result); - } - Handle<Object> GetCodeScopeInfo() { - Handle<Object> element = this->GetField(kCodeScopeInfoOffset_); - return UnwrapJSValue(Handle<JSValue>::cast(element)); - } - int GetStartPosition() { - return this->GetSmiValueField(kStartPositionOffset_); - } - int GetEndPosition() { - return this->GetSmiValueField(kEndPositionOffset_); + Handle<SharedFunctionInfo> shared = + Handle<SharedFunctionInfo>::cast(raw_result); + result = Handle<FixedArray>(shared->feedback_vector(), isolate()); + CHECK_EQ(result->length(), GetSlotCount()); + } else { + // Scripts may never have a SharedFunctionInfo created, so + // create a type feedback vector here. 
+ int slot_count = GetSlotCount(); + result = isolate()->factory()->NewTypeFeedbackVector(slot_count); } - - private: - static const int kFunctionNameOffset_ = 0; - static const int kStartPositionOffset_ = 1; - static const int kEndPositionOffset_ = 2; - static const int kParamNumOffset_ = 3; - static const int kCodeOffset_ = 4; - static const int kCodeScopeInfoOffset_ = 5; - static const int kFunctionScopeInfoOffset_ = 6; - static const int kParentIndexOffset_ = 7; - static const int kSharedFunctionInfoOffset_ = 8; - static const int kLiteralNumOffset_ = 9; - static const int kSize_ = 10; - - friend class JSArrayBasedStruct<FunctionInfoWrapper>; -}; + return result; +} -// Wraps SharedFunctionInfo along with some of its fields for passing it -// back to JavaScript. SharedFunctionInfo object itself is additionally -// wrapped into BlindReference for sanitizing reasons. -class SharedInfoWrapper : public JSArrayBasedStruct<SharedInfoWrapper> { - public: - static bool IsInstance(Handle<JSArray> array) { - return array->length() == Smi::FromInt(kSize_) && - Object::GetElementNoExceptionThrown( - array->GetIsolate(), array, kSharedInfoOffset_)->IsJSValue(); - } +Handle<Object> FunctionInfoWrapper::GetCodeScopeInfo() { + Handle<Object> element = this->GetField(kCodeScopeInfoOffset_); + return UnwrapJSValue(Handle<JSValue>::cast(element)); +} - explicit SharedInfoWrapper(Handle<JSArray> array) - : JSArrayBasedStruct<SharedInfoWrapper>(array) { - } - void SetProperties(Handle<String> name, int start_position, int end_position, - Handle<SharedFunctionInfo> info) { - HandleScope scope(isolate()); - this->SetField(kFunctionNameOffset_, name); - Handle<JSValue> info_holder = WrapInJSValue(info); - this->SetField(kSharedInfoOffset_, info_holder); - this->SetSmiValueField(kStartPositionOffset_, start_position); - this->SetSmiValueField(kEndPositionOffset_, end_position); - } - Handle<SharedFunctionInfo> GetInfo() { - Handle<Object> element = this->GetField(kSharedInfoOffset_); - Handle<JSValue> value_wrapper = Handle<JSValue>::cast(element); - return UnwrapSharedFunctionInfoFromJSValue(value_wrapper); - } +void SharedInfoWrapper::SetProperties(Handle<String> name, + int start_position, + int end_position, + Handle<SharedFunctionInfo> info) { + HandleScope scope(isolate()); + this->SetField(kFunctionNameOffset_, name); + Handle<JSValue> info_holder = WrapInJSValue(info); + this->SetField(kSharedInfoOffset_, info_holder); + this->SetSmiValueField(kStartPositionOffset_, start_position); + this->SetSmiValueField(kEndPositionOffset_, end_position); +} - private: - static const int kFunctionNameOffset_ = 0; - static const int kStartPositionOffset_ = 1; - static const int kEndPositionOffset_ = 2; - static const int kSharedInfoOffset_ = 3; - static const int kSize_ = 4; - friend class JSArrayBasedStruct<SharedInfoWrapper>; -}; +Handle<SharedFunctionInfo> SharedInfoWrapper::GetInfo() { + Handle<Object> element = this->GetField(kSharedInfoOffset_); + Handle<JSValue> value_wrapper = Handle<JSValue>::cast(element); + return UnwrapSharedFunctionInfoFromJSValue(value_wrapper); +} class FunctionInfoListener { @@ -813,6 +709,7 @@ class FunctionInfoListener { info.SetInitialProperties(fun->name(), fun->start_position(), fun->end_position(), fun->parameter_count(), fun->materialized_literal_count(), + fun->slot_count(), current_parent_index_); current_parent_index_ = len_; SetElementSloppy(result_, len_, info.GetJSArray()); @@ -823,8 +720,8 @@ class FunctionInfoListener { HandleScope scope(isolate()); FunctionInfoWrapper 
info = FunctionInfoWrapper::cast( - *Object::GetElementNoExceptionThrown( - isolate(), result_, current_parent_index_)); + *Object::GetElement( + isolate(), result_, current_parent_index_).ToHandleChecked()); current_parent_index_ = info.GetParentIndex(); } @@ -833,8 +730,8 @@ class FunctionInfoListener { void FunctionCode(Handle<Code> function_code) { FunctionInfoWrapper info = FunctionInfoWrapper::cast( - *Object::GetElementNoExceptionThrown( - isolate(), result_, current_parent_index_)); + *Object::GetElement( + isolate(), result_, current_parent_index_).ToHandleChecked()); info.SetFunctionCode(function_code, Handle<HeapObject>(isolate()->heap()->null_value())); } @@ -848,14 +745,13 @@ class FunctionInfoListener { } FunctionInfoWrapper info = FunctionInfoWrapper::cast( - *Object::GetElementNoExceptionThrown( - isolate(), result_, current_parent_index_)); + *Object::GetElement( + isolate(), result_, current_parent_index_).ToHandleChecked()); info.SetFunctionCode(Handle<Code>(shared->code()), Handle<HeapObject>(shared->scope_info())); info.SetSharedFunctionInfo(shared); - Handle<Object> scope_info_list(SerializeFunctionScope(scope, zone), - isolate()); + Handle<Object> scope_info_list = SerializeFunctionScope(scope, zone); info.SetFunctionScopeInfo(scope_info_list); } @@ -864,9 +760,7 @@ class FunctionInfoListener { private: Isolate* isolate() const { return result_->GetIsolate(); } - Object* SerializeFunctionScope(Scope* scope, Zone* zone) { - HandleScope handle_scope(isolate()); - + Handle<Object> SerializeFunctionScope(Scope* scope, Zone* zone) { Handle<JSArray> scope_info_list = isolate()->factory()->NewJSArray(10); int scope_info_length = 0; @@ -875,6 +769,7 @@ class FunctionInfoListener { // scopes of this chain. Scope* current_scope = scope; while (current_scope != NULL) { + HandleScope handle_scope(isolate()); ZoneList<Variable*> stack_list(current_scope->StackLocalCount(), zone); ZoneList<Variable*> context_list( current_scope->ContextLocalCount(), zone); @@ -901,7 +796,7 @@ class FunctionInfoListener { current_scope = current_scope->outer_scope(); } - return *scope_info_list; + return scope_info_list; } Handle<JSArray> result_; @@ -910,8 +805,8 @@ class FunctionInfoListener { }; -JSArray* LiveEdit::GatherCompileInfo(Handle<Script> script, - Handle<String> source) { +MaybeHandle<JSArray> LiveEdit::GatherCompileInfo(Handle<Script> script, + Handle<String> source) { Isolate* isolate = script->GetIsolate(); FunctionInfoListener listener(isolate); @@ -933,8 +828,7 @@ JSArray* LiveEdit::GatherCompileInfo(Handle<Script> script, // A logical 'catch' section. 
Handle<JSObject> rethrow_exception; if (isolate->has_pending_exception()) { - Handle<Object> exception(isolate->pending_exception()->ToObjectChecked(), - isolate); + Handle<Object> exception(isolate->pending_exception(), isolate); MessageLocation message_location = isolate->GetMessageLocation(); isolate->clear_pending_message(); @@ -954,13 +848,14 @@ JSArray* LiveEdit::GatherCompileInfo(Handle<Script> script, Handle<Smi> start_pos( Smi::FromInt(message_location.start_pos()), isolate); Handle<Smi> end_pos(Smi::FromInt(message_location.end_pos()), isolate); - Handle<JSValue> script_obj = GetScriptWrapper(message_location.script()); + Handle<JSObject> script_obj = + Script::GetWrapper(message_location.script()); JSReceiver::SetProperty( - rethrow_exception, start_pos_key, start_pos, NONE, SLOPPY); + rethrow_exception, start_pos_key, start_pos, NONE, SLOPPY).Assert(); JSReceiver::SetProperty( - rethrow_exception, end_pos_key, end_pos, NONE, SLOPPY); + rethrow_exception, end_pos_key, end_pos, NONE, SLOPPY).Assert(); JSReceiver::SetProperty( - rethrow_exception, script_obj_key, script_obj, NONE, SLOPPY); + rethrow_exception, script_obj_key, script_obj, NONE, SLOPPY).Assert(); } } @@ -969,10 +864,9 @@ JSArray* LiveEdit::GatherCompileInfo(Handle<Script> script, script->set_source(*original_source); if (rethrow_exception.is_null()) { - return *(listener.GetResult()); + return listener.GetResult(); } else { - isolate->Throw(*rethrow_exception); - return 0; + return isolate->Throw<JSArray>(rethrow_exception); } } @@ -984,7 +878,7 @@ void LiveEdit::WrapSharedFunctionInfos(Handle<JSArray> array) { for (int i = 0; i < len; i++) { Handle<SharedFunctionInfo> info( SharedFunctionInfo::cast( - *Object::GetElementNoExceptionThrown(isolate, array, i))); + *Object::GetElement(isolate, array, i).ToHandleChecked())); SharedInfoWrapper info_wrapper = SharedInfoWrapper::Create(isolate); Handle<String> name_handle(String::cast(info->name())); info_wrapper.SetProperties(name_handle, info->start_position(), @@ -1261,15 +1155,10 @@ static void DeoptimizeDependentFunctions(SharedFunctionInfo* function_info) { } -MaybeObject* LiveEdit::ReplaceFunctionCode( +void LiveEdit::ReplaceFunctionCode( Handle<JSArray> new_compile_info_array, Handle<JSArray> shared_info_array) { Isolate* isolate = new_compile_info_array->GetIsolate(); - HandleScope scope(isolate); - - if (!SharedInfoWrapper::IsInstance(shared_info_array)) { - return isolate->ThrowIllegalOperation(); - } FunctionInfoWrapper compile_info_wrapper(new_compile_info_array); SharedInfoWrapper shared_info_wrapper(shared_info_array); @@ -1286,6 +1175,10 @@ MaybeObject* LiveEdit::ReplaceFunctionCode( shared_info->set_scope_info(ScopeInfo::cast(*code_scope_info)); } shared_info->DisableOptimization(kLiveEdit); + // Update the type feedback vector + Handle<FixedArray> feedback_vector = + compile_info_wrapper.GetFeedbackVector(); + shared_info->set_feedback_vector(*feedback_vector); } if (shared_info->debug_info()->IsDebugInfo()) { @@ -1307,27 +1200,15 @@ MaybeObject* LiveEdit::ReplaceFunctionCode( DeoptimizeDependentFunctions(*shared_info); isolate->compilation_cache()->Remove(shared_info); - - return isolate->heap()->undefined_value(); } -MaybeObject* LiveEdit::FunctionSourceUpdated( - Handle<JSArray> shared_info_array) { - Isolate* isolate = shared_info_array->GetIsolate(); - HandleScope scope(isolate); - - if (!SharedInfoWrapper::IsInstance(shared_info_array)) { - return isolate->ThrowIllegalOperation(); - } - +void LiveEdit::FunctionSourceUpdated(Handle<JSArray> 
shared_info_array) { SharedInfoWrapper shared_info_wrapper(shared_info_array); Handle<SharedFunctionInfo> shared_info = shared_info_wrapper.GetInfo(); DeoptimizeDependentFunctions(*shared_info); - isolate->compilation_cache()->Remove(shared_info); - - return isolate->heap()->undefined_value(); + shared_info_array->GetIsolate()->compilation_cache()->Remove(shared_info); } @@ -1359,21 +1240,21 @@ static int TranslatePosition(int original_position, // TODO(635): binary search may be used here for (int i = 0; i < array_len; i += 3) { HandleScope scope(isolate); - Handle<Object> element = Object::GetElementNoExceptionThrown( - isolate, position_change_array, i); + Handle<Object> element = Object::GetElement( + isolate, position_change_array, i).ToHandleChecked(); CHECK(element->IsSmi()); int chunk_start = Handle<Smi>::cast(element)->value(); if (original_position < chunk_start) { break; } - element = Object::GetElementNoExceptionThrown( - isolate, position_change_array, i + 1); + element = Object::GetElement( + isolate, position_change_array, i + 1).ToHandleChecked(); CHECK(element->IsSmi()); int chunk_end = Handle<Smi>::cast(element)->value(); // Position mustn't be inside a chunk. ASSERT(original_position >= chunk_end); - element = Object::GetElementNoExceptionThrown( - isolate, position_change_array, i + 2); + element = Object::GetElement( + isolate, position_change_array, i + 2).ToHandleChecked(); CHECK(element->IsSmi()); int chunk_changed_end = Handle<Smi>::cast(element)->value(); position_diff = chunk_changed_end - chunk_end; @@ -1504,12 +1385,8 @@ static Handle<Code> PatchPositionsInCode( } -MaybeObject* LiveEdit::PatchFunctionPositions( - Handle<JSArray> shared_info_array, Handle<JSArray> position_change_array) { - if (!SharedInfoWrapper::IsInstance(shared_info_array)) { - return shared_info_array->GetIsolate()->ThrowIllegalOperation(); - } - +void LiveEdit::PatchFunctionPositions(Handle<JSArray> shared_info_array, + Handle<JSArray> position_change_array) { SharedInfoWrapper shared_info_wrapper(shared_info_array); Handle<SharedFunctionInfo> info = shared_info_wrapper.GetInfo(); @@ -1540,8 +1417,6 @@ MaybeObject* LiveEdit::PatchFunctionPositions( ReplaceCodeObject(Handle<Code>(info->code()), patched_code); } } - - return info->GetIsolate()->heap()->undefined_value(); } @@ -1568,9 +1443,9 @@ static Handle<Script> CreateScriptCopy(Handle<Script> original) { } -Object* LiveEdit::ChangeScriptSource(Handle<Script> original_script, - Handle<String> new_source, - Handle<Object> old_script_name) { +Handle<Object> LiveEdit::ChangeScriptSource(Handle<Script> original_script, + Handle<String> new_source, + Handle<Object> old_script_name) { Isolate* isolate = original_script->GetIsolate(); Handle<Object> old_script_object; if (old_script_name->IsString()) { @@ -1588,7 +1463,7 @@ Object* LiveEdit::ChangeScriptSource(Handle<Script> original_script, // Drop line ends so that they will be recalculated. 
original_script->set_line_ends(isolate->heap()->undefined_value()); - return *old_script_object; + return old_script_object; } @@ -1630,7 +1505,7 @@ static bool CheckActivation(Handle<JSArray> shared_info_array, for (int i = 0; i < len; i++) { HandleScope scope(isolate); Handle<Object> element = - Object::GetElementNoExceptionThrown(isolate, shared_info_array, i); + Object::GetElement(isolate, shared_info_array, i).ToHandleChecked(); Handle<JSValue> jsvalue = Handle<JSValue>::cast(element); Handle<SharedFunctionInfo> shared = UnwrapSharedFunctionInfoFromJSValue(jsvalue); @@ -1694,7 +1569,7 @@ static const char* DropFrames(Vector<StackFrame*> frames, *mode = Debug::FRAME_DROPPED_IN_IC_CALL; frame_has_padding = Debug::FramePaddingLayout::kIsSupported; } else if (pre_top_frame_code == - isolate->debug()->debug_break_slot()) { + isolate->builtins()->builtin(Builtins::kSlot_DebugBreak)) { // OK, we can drop debug break slot. *mode = Debug::FRAME_DROPPED_IN_DEBUG_SLOT_CALL; frame_has_padding = Debug::FramePaddingLayout::kIsSupported; @@ -1947,7 +1822,7 @@ static const char* DropActivationsInActiveThread( // Replace "blocked on active" with "replaced on active" status. for (int i = 0; i < array_len; i++) { Handle<Object> obj = - Object::GetElementNoExceptionThrown(isolate, result, i); + Object::GetElement(isolate, result, i).ToHandleChecked(); if (*obj == Smi::FromInt(LiveEdit::FUNCTION_BLOCKED_ON_ACTIVE_STACK)) { Handle<Object> replaced( Smi::FromInt(LiveEdit::FUNCTION_REPLACED_ON_ACTIVE_STACK), isolate); @@ -2013,8 +1888,8 @@ Handle<JSArray> LiveEdit::CheckAndDropActivations( DropActivationsInActiveThread(shared_info_array, result, do_drop); if (error_message != NULL) { // Add error message as an array extra element. - Handle<String> str = isolate->factory()->NewStringFromAscii( - CStrVector(error_message)); + Handle<String> str = + isolate->factory()->NewStringFromAsciiChecked(error_message); SetElementSloppy(result, len, str); } return result; @@ -2101,36 +1976,4 @@ bool LiveEditFunctionTracker::IsActive(Isolate* isolate) { return isolate->active_function_info_listener() != NULL; } - -#else // ENABLE_DEBUGGER_SUPPORT - -// This ifdef-else-endif section provides working or stub implementation of -// LiveEditFunctionTracker. -LiveEditFunctionTracker::LiveEditFunctionTracker(Isolate* isolate, - FunctionLiteral* fun) { -} - - -LiveEditFunctionTracker::~LiveEditFunctionTracker() { -} - - -void LiveEditFunctionTracker::RecordFunctionInfo( - Handle<SharedFunctionInfo> info, FunctionLiteral* lit, - Zone* zone) { -} - - -void LiveEditFunctionTracker::RecordRootFunctionInfo(Handle<Code> code) { -} - - -bool LiveEditFunctionTracker::IsActive(Isolate* isolate) { - return false; -} - -#endif // ENABLE_DEBUGGER_SUPPORT - - - } } // namespace v8::internal diff --git a/deps/v8/src/liveedit.h b/deps/v8/src/liveedit.h index 0efbb95cc..5be63ac0a 100644 --- a/deps/v8/src/liveedit.h +++ b/deps/v8/src/liveedit.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LIVEEDIT_H_ #define V8_LIVEEDIT_H_ @@ -75,39 +52,36 @@ class LiveEditFunctionTracker { static bool IsActive(Isolate* isolate); private: -#ifdef ENABLE_DEBUGGER_SUPPORT Isolate* isolate_; -#endif }; -#ifdef ENABLE_DEBUGGER_SUPPORT class LiveEdit : AllStatic { public: - static JSArray* GatherCompileInfo(Handle<Script> script, - Handle<String> source); + MUST_USE_RESULT static MaybeHandle<JSArray> GatherCompileInfo( + Handle<Script> script, + Handle<String> source); static void WrapSharedFunctionInfos(Handle<JSArray> array); - MUST_USE_RESULT static MaybeObject* ReplaceFunctionCode( - Handle<JSArray> new_compile_info_array, - Handle<JSArray> shared_info_array); + static void ReplaceFunctionCode(Handle<JSArray> new_compile_info_array, + Handle<JSArray> shared_info_array); - static MaybeObject* FunctionSourceUpdated(Handle<JSArray> shared_info_array); + static void FunctionSourceUpdated(Handle<JSArray> shared_info_array); // Updates script field in FunctionSharedInfo. static void SetFunctionScript(Handle<JSValue> function_wrapper, Handle<Object> script_handle); - MUST_USE_RESULT static MaybeObject* PatchFunctionPositions( - Handle<JSArray> shared_info_array, Handle<JSArray> position_change_array); + static void PatchFunctionPositions(Handle<JSArray> shared_info_array, + Handle<JSArray> position_change_array); // For a script updates its source field. If old_script_name is provided // (i.e. is a String), also creates a copy of the script with its original // source and sends notification to debugger. - static Object* ChangeScriptSource(Handle<Script> original_script, - Handle<String> new_source, - Handle<Object> old_script_name); + static Handle<Object> ChangeScriptSource(Handle<Script> original_script, + Handle<String> new_source, + Handle<Object> old_script_name); // In a code of a parent function replaces original function as embedded // object with a substitution one. @@ -175,9 +149,161 @@ class Comparator { Output* result_writer); }; -#endif // ENABLE_DEBUGGER_SUPPORT +// Simple helper class that creates more or less typed structures over +// JSArray object. This is an adhoc method of passing structures from C++ +// to JavaScript. 
+template<typename S> +class JSArrayBasedStruct { + public: + static S Create(Isolate* isolate) { + Factory* factory = isolate->factory(); + Handle<JSArray> array = factory->NewJSArray(S::kSize_); + return S(array); + } + + static S cast(Object* object) { + JSArray* array = JSArray::cast(object); + Handle<JSArray> array_handle(array); + return S(array_handle); + } + + explicit JSArrayBasedStruct(Handle<JSArray> array) : array_(array) { + } + + Handle<JSArray> GetJSArray() { + return array_; + } + + Isolate* isolate() const { + return array_->GetIsolate(); + } + + protected: + void SetField(int field_position, Handle<Object> value) { + JSObject::SetElement(array_, field_position, value, NONE, SLOPPY).Assert(); + } + + void SetSmiValueField(int field_position, int value) { + SetField(field_position, Handle<Smi>(Smi::FromInt(value), isolate())); + } + + Handle<Object> GetField(int field_position) { + return Object::GetElement( + isolate(), array_, field_position).ToHandleChecked(); + } + + int GetSmiValueField(int field_position) { + Handle<Object> res = GetField(field_position); + return Handle<Smi>::cast(res)->value(); + } + + private: + Handle<JSArray> array_; +}; + + +// Represents some function compilation details. This structure will be used +// from JavaScript. It contains Code object, which is kept wrapped +// into a BlindReference for sanitizing reasons. +class FunctionInfoWrapper : public JSArrayBasedStruct<FunctionInfoWrapper> { + public: + explicit FunctionInfoWrapper(Handle<JSArray> array) + : JSArrayBasedStruct<FunctionInfoWrapper>(array) { + } + + void SetInitialProperties(Handle<String> name, + int start_position, + int end_position, + int param_num, + int literal_count, + int slot_count, + int parent_index); + + void SetFunctionCode(Handle<Code> function_code, + Handle<HeapObject> code_scope_info); + + void SetFunctionScopeInfo(Handle<Object> scope_info_array) { + this->SetField(kFunctionScopeInfoOffset_, scope_info_array); + } + + void SetSharedFunctionInfo(Handle<SharedFunctionInfo> info); + + int GetLiteralCount() { + return this->GetSmiValueField(kLiteralNumOffset_); + } + + int GetParentIndex() { + return this->GetSmiValueField(kParentIndexOffset_); + } + + Handle<Code> GetFunctionCode(); + + Handle<FixedArray> GetFeedbackVector(); + + Handle<Object> GetCodeScopeInfo(); + + int GetStartPosition() { + return this->GetSmiValueField(kStartPositionOffset_); + } + + int GetEndPosition() { return this->GetSmiValueField(kEndPositionOffset_); } + + int GetSlotCount() { + return this->GetSmiValueField(kSlotNumOffset_); + } + + private: + static const int kFunctionNameOffset_ = 0; + static const int kStartPositionOffset_ = 1; + static const int kEndPositionOffset_ = 2; + static const int kParamNumOffset_ = 3; + static const int kCodeOffset_ = 4; + static const int kCodeScopeInfoOffset_ = 5; + static const int kFunctionScopeInfoOffset_ = 6; + static const int kParentIndexOffset_ = 7; + static const int kSharedFunctionInfoOffset_ = 8; + static const int kLiteralNumOffset_ = 9; + static const int kSlotNumOffset_ = 10; + static const int kSize_ = 11; + + friend class JSArrayBasedStruct<FunctionInfoWrapper>; +}; + + +// Wraps SharedFunctionInfo along with some of its fields for passing it +// back to JavaScript. SharedFunctionInfo object itself is additionally +// wrapped into BlindReference for sanitizing reasons. 
+class SharedInfoWrapper : public JSArrayBasedStruct<SharedInfoWrapper> { + public: + static bool IsInstance(Handle<JSArray> array) { + return array->length() == Smi::FromInt(kSize_) && + Object::GetElement(array->GetIsolate(), array, kSharedInfoOffset_) + .ToHandleChecked()->IsJSValue(); + } + + explicit SharedInfoWrapper(Handle<JSArray> array) + : JSArrayBasedStruct<SharedInfoWrapper>(array) { + } + + void SetProperties(Handle<String> name, + int start_position, + int end_position, + Handle<SharedFunctionInfo> info); + + Handle<SharedFunctionInfo> GetInfo(); + + private: + static const int kFunctionNameOffset_ = 0; + static const int kStartPositionOffset_ = 1; + static const int kEndPositionOffset_ = 2; + static const int kSharedInfoOffset_ = 3; + static const int kSize_ = 4; + + friend class JSArrayBasedStruct<SharedInfoWrapper>; +}; + } } // namespace v8::internal #endif /* V*_LIVEEDIT_H_ */ diff --git a/deps/v8/src/log-inl.h b/deps/v8/src/log-inl.h index 7f653cb72..d6781678c 100644 --- a/deps/v8/src/log-inl.h +++ b/deps/v8/src/log-inl.h @@ -1,29 +1,6 @@ // Copyright 2006-2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LOG_INL_H_ #define V8_LOG_INL_H_ diff --git a/deps/v8/src/log-utils.cc b/deps/v8/src/log-utils.cc index 909d4a513..687578847 100644 --- a/deps/v8/src/log-utils.cc +++ b/deps/v8/src/log-utils.cc @@ -1,29 +1,6 @@ // Copyright 2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -51,7 +28,6 @@ void Log::Initialize(const char* log_file_name) { // --log-all enables all the log flags. if (FLAG_log_all) { - FLAG_log_runtime = true; FLAG_log_api = true; FLAG_log_code = true; FLAG_log_gc = true; diff --git a/deps/v8/src/log-utils.h b/deps/v8/src/log-utils.h index f1a21e2cc..deb3f7c48 100644 --- a/deps/v8/src/log-utils.h +++ b/deps/v8/src/log-utils.h @@ -1,29 +1,6 @@ // Copyright 2006-2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_LOG_UTILS_H_ #define V8_LOG_UTILS_H_ @@ -45,10 +22,10 @@ class Log { void stop() { is_stopped_ = true; } static bool InitLogAtStart() { - return FLAG_log || FLAG_log_runtime || FLAG_log_api - || FLAG_log_code || FLAG_log_gc || FLAG_log_handles || FLAG_log_suspect - || FLAG_log_regexp || FLAG_ll_prof || FLAG_perf_basic_prof - || FLAG_perf_jit_prof || FLAG_log_internal_timer_events; + return FLAG_log || FLAG_log_api || FLAG_log_code || FLAG_log_gc + || FLAG_log_handles || FLAG_log_suspect || FLAG_log_regexp + || FLAG_ll_prof || FLAG_perf_basic_prof || FLAG_perf_jit_prof + || FLAG_log_internal_timer_events; } // Frees all resources acquired in Initialize and Open... functions. diff --git a/deps/v8/src/log.cc b/deps/v8/src/log.cc index 942170c28..88dae56b7 100644 --- a/deps/v8/src/log.cc +++ b/deps/v8/src/log.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include <stdarg.h> @@ -1150,7 +1127,8 @@ void Logger::LogRegExpSource(Handle<JSRegExp> regexp) { // (re.global?"g":"") + (re.ignorecase?"i":"") + (re.multiline?"m":"") Log::MessageBuilder msg(log_); - Handle<Object> source = GetProperty(regexp, "source"); + Handle<Object> source = Object::GetProperty( + isolate_, regexp, "source").ToHandleChecked(); if (!source->IsString()) { msg.Append("no source"); return; @@ -1168,17 +1146,20 @@ void Logger::LogRegExpSource(Handle<JSRegExp> regexp) { msg.Append('/'); // global flag - Handle<Object> global = GetProperty(regexp, "global"); + Handle<Object> global = Object::GetProperty( + isolate_, regexp, "global").ToHandleChecked(); if (global->IsTrue()) { msg.Append('g'); } // ignorecase flag - Handle<Object> ignorecase = GetProperty(regexp, "ignoreCase"); + Handle<Object> ignorecase = Object::GetProperty( + isolate_, regexp, "ignoreCase").ToHandleChecked(); if (ignorecase->IsTrue()) { msg.Append('i'); } // multiline flag - Handle<Object> multiline = GetProperty(regexp, "multiline"); + Handle<Object> multiline = Object::GetProperty( + isolate_, regexp, "multiline").ToHandleChecked(); if (multiline->IsTrue()) { msg.Append('m'); } @@ -1197,47 +1178,6 @@ void Logger::RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache) { } -void Logger::LogRuntime(Vector<const char> format, - Handle<JSArray> args) { - if (!log_->IsEnabled() || !FLAG_log_runtime) return; - Log::MessageBuilder msg(log_); - for (int i = 0; i < format.length(); i++) { - char c = format[i]; - if (c == '%' && i <= format.length() - 2) { - i++; - ASSERT('0' <= format[i] && format[i] <= '9'); - // No exception expected when getting an element from an array literal. - Handle<Object> obj = - Object::GetElementNoExceptionThrown(isolate_, args, format[i] - '0'); - i++; - switch (format[i]) { - case 's': - msg.AppendDetailed(String::cast(*obj), false); - break; - case 'S': - msg.AppendDetailed(String::cast(*obj), true); - break; - case 'r': - Logger::LogRegExpSource(Handle<JSRegExp>::cast(obj)); - break; - case 'x': - msg.Append("0x%x", Smi::cast(*obj)->value()); - break; - case 'i': - msg.Append("%i", Smi::cast(*obj)->value()); - break; - default: - UNREACHABLE(); - } - } else { - msg.Append(c); - } - } - msg.Append('\n'); - msg.WriteToLogFile(); -} - - void Logger::ApiIndexedSecurityCheck(uint32_t index) { if (!log_->IsEnabled() || !FLAG_log_api) return; ApiEvent("api,check-security,%u\n", index); @@ -1872,6 +1812,10 @@ void Logger::LogCodeObject(Object* object) { description = "A load IC from the snapshot"; tag = Logger::LOAD_IC_TAG; break; + case Code::CALL_IC: + description = "A call IC from the snapshot"; + tag = Logger::CALL_IC_TAG; + break; case Code::STORE_IC: description = "A store IC from the snapshot"; tag = Logger::STORE_IC_TAG; @@ -1904,9 +1848,9 @@ void Logger::LogExistingFunction(Handle<SharedFunctionInfo> shared, Handle<String> func_name(shared->DebugName()); if (shared->script()->IsScript()) { Handle<Script> script(Script::cast(shared->script())); - int line_num = GetScriptLineNumber(script, shared->start_position()) + 1; + int line_num = Script::GetLineNumber(script, shared->start_position()) + 1; int column_num = - GetScriptColumnNumber(script, shared->start_position()) + 1; + Script::GetColumnNumber(script, shared->start_position()) + 1; if (script->name()->IsString()) { Handle<String> script_name(String::cast(script->name())); if (line_num > 0) { diff --git a/deps/v8/src/log.h b/deps/v8/src/log.h index c01aca273..b1a41e949 100644 --- a/deps/v8/src/log.h +++ 
b/deps/v8/src/log.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_LOG_H_ #define V8_LOG_H_ @@ -144,6 +121,7 @@ struct TickSample; V(KEYED_STORE_POLYMORPHIC_IC_TAG, "KeyedStorePolymorphicIC") \ V(KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, "KeyedExternalArrayStoreIC") \ V(LAZY_COMPILE_TAG, "LazyCompile") \ + V(CALL_IC_TAG, "CallIC") \ V(LOAD_IC_TAG, "LoadIC") \ V(LOAD_POLYMORPHIC_IC_TAG, "LoadPolymorphicIC") \ V(REG_EXP_TAG, "RegExp") \ @@ -348,9 +326,6 @@ class Logger { void RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache); - // Log an event reported from generated code - void LogRuntime(Vector<const char> format, Handle<JSArray> args); - bool is_logging() { return is_logging_; } diff --git a/deps/v8/src/macro-assembler.h b/deps/v8/src/macro-assembler.h index b05868c01..bd22b93c7 100644 --- a/deps/v8/src/macro-assembler.h +++ b/deps/v8/src/macro-assembler.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MACRO_ASSEMBLER_H_ #define V8_MACRO_ASSEMBLER_H_ diff --git a/deps/v8/src/macros.py b/deps/v8/src/macros.py index 0b69e6b80..3eb906f74 100644 --- a/deps/v8/src/macros.py +++ b/deps/v8/src/macros.py @@ -87,6 +87,10 @@ const kMaxYear = 1000000; const kMinMonth = -10000000; const kMaxMonth = 10000000; +# Strict mode flags for passing to %SetProperty +const kSloppyMode = 0; +const kStrictMode = 1; + # Native cache ids. const STRING_TO_REGEXP_CACHE_ID = 0; @@ -97,7 +101,7 @@ const STRING_TO_REGEXP_CACHE_ID = 0; # values of 'bar'. macro IS_NULL(arg) = (arg === null); macro IS_NULL_OR_UNDEFINED(arg) = (arg == null); -macro IS_UNDEFINED(arg) = (typeof(arg) === 'undefined'); +macro IS_UNDEFINED(arg) = (arg === (void 0)); macro IS_NUMBER(arg) = (typeof(arg) === 'number'); macro IS_STRING(arg) = (typeof(arg) === 'string'); macro IS_BOOLEAN(arg) = (typeof(arg) === 'boolean'); @@ -272,3 +276,8 @@ const PROPERTY_ATTRIBUTES_NONE = 0; const PROPERTY_ATTRIBUTES_STRING = 8; const PROPERTY_ATTRIBUTES_SYMBOLIC = 16; const PROPERTY_ATTRIBUTES_PRIVATE_SYMBOL = 32; + +# Use for keys, values and entries iterators. +const ITERATOR_KIND_KEYS = 1; +const ITERATOR_KIND_VALUES = 2; +const ITERATOR_KIND_ENTRIES = 3; diff --git a/deps/v8/src/mark-compact-inl.h b/deps/v8/src/mark-compact-inl.h index a42e0f7f1..608a0ec98 100644 --- a/deps/v8/src/mark-compact-inl.h +++ b/deps/v8/src/mark-compact-inl.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MARK_COMPACT_INL_H_ #define V8_MARK_COMPACT_INL_H_ diff --git a/deps/v8/src/mark-compact.cc b/deps/v8/src/mark-compact.cc index f04a8bcb9..ec8e94179 100644 --- a/deps/v8/src/mark-compact.cc +++ b/deps/v8/src/mark-compact.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -40,6 +17,7 @@ #include "mark-compact.h" #include "objects-visiting.h" #include "objects-visiting-inl.h" +#include "spaces-inl.h" #include "stub-cache.h" #include "sweeper-thread.h" @@ -449,7 +427,7 @@ void MarkCompactCollector::CollectGarbage() { #ifdef VERIFY_HEAP if (heap()->weak_embedded_objects_verification_enabled()) { - VerifyWeakEmbeddedObjectsInOptimizedCode(); + VerifyWeakEmbeddedObjectsInCode(); } if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { VerifyOmittedMapChecks(); @@ -510,13 +488,13 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() { } -void MarkCompactCollector::VerifyWeakEmbeddedObjectsInOptimizedCode() { +void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { HeapObjectIterator code_iterator(heap()->code_space()); for (HeapObject* obj = code_iterator.Next(); obj != NULL; obj = code_iterator.Next()) { Code* code = Code::cast(obj); - if (code->kind() != Code::OPTIMIZED_FUNCTION) continue; + if (!code->is_optimized_code() && !code->is_weak_stub()) continue; if (WillBeDeoptimized(code)) continue; code->VerifyEmbeddedObjectsDependency(); } @@ -595,11 +573,8 @@ class MarkCompactCollector::SweeperTask : public v8::Task { void MarkCompactCollector::StartSweeperThreads() { - // TODO(hpayer): This check is just used for debugging purpose and - // should be removed or turned into an assert after investigating the - // crash in concurrent sweeping. - CHECK(free_list_old_pointer_space_.get()->IsEmpty()); - CHECK(free_list_old_data_space_.get()->IsEmpty()); + ASSERT(free_list_old_pointer_space_.get()->IsEmpty()); + ASSERT(free_list_old_data_space_.get()->IsEmpty()); sweeping_pending_ = true; for (int i = 0; i < isolate()->num_sweeper_threads(); i++) { isolate()->sweeper_threads()[i]->StartSweeping(); @@ -627,14 +602,30 @@ void MarkCompactCollector::WaitUntilSweepingCompleted() { } ParallelSweepSpacesComplete(); sweeping_pending_ = false; - RefillFreeLists(heap()->paged_space(OLD_DATA_SPACE)); - RefillFreeLists(heap()->paged_space(OLD_POINTER_SPACE)); + RefillFreeList(heap()->paged_space(OLD_DATA_SPACE)); + RefillFreeList(heap()->paged_space(OLD_POINTER_SPACE)); heap()->paged_space(OLD_DATA_SPACE)->ResetUnsweptFreeBytes(); heap()->paged_space(OLD_POINTER_SPACE)->ResetUnsweptFreeBytes(); } -intptr_t MarkCompactCollector::RefillFreeLists(PagedSpace* space) { +bool MarkCompactCollector::IsSweepingCompleted() { + for (int i = 0; i < isolate()->num_sweeper_threads(); i++) { + if (!isolate()->sweeper_threads()[i]->SweepingCompleted()) { + return false; + } + } + if (FLAG_job_based_sweeping) { + if (!pending_sweeper_jobs_semaphore_.WaitFor(TimeDelta::FromSeconds(0))) { + return false; + } + pending_sweeper_jobs_semaphore_.Signal(); + } + return true; +} + + +void MarkCompactCollector::RefillFreeList(PagedSpace* space) { FreeList* free_list; if (space == heap()->old_pointer_space()) { @@ -644,13 +635,12 @@ intptr_t MarkCompactCollector::RefillFreeLists(PagedSpace* space) { } else { // Any PagedSpace might invoke RefillFreeLists, so we need to make sure // to only refill them for old data and pointer spaces. - return 0; + return; } intptr_t freed_bytes = space->free_list()->Concatenate(free_list); space->AddToAccountingStats(freed_bytes); space->DecrementUnsweptFreeBytes(freed_bytes); - return freed_bytes; } @@ -2070,8 +2060,8 @@ int MarkCompactCollector::DiscoverAndPromoteBlackObjectsOnPage( } // Promotion failed. Just migrate object to another semispace. 
- MaybeObject* allocation = new_space->AllocateRaw(size); - if (allocation->IsFailure()) { + AllocationResult allocation = new_space->AllocateRaw(size); + if (allocation.IsRetry()) { if (!new_space->AddFreshPage()) { // Shouldn't happen. We are sweeping linearly, and to-space // has the same number of pages as from-space, so there is @@ -2079,9 +2069,9 @@ int MarkCompactCollector::DiscoverAndPromoteBlackObjectsOnPage( UNREACHABLE(); } allocation = new_space->AllocateRaw(size); - ASSERT(!allocation->IsFailure()); + ASSERT(!allocation.IsRetry()); } - Object* target = allocation->ToObjectUnchecked(); + Object* target = allocation.ToObjectChecked(); MigrateObject(HeapObject::cast(target), object, @@ -2146,7 +2136,10 @@ void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { StringTable* string_table = heap()->string_table(); // Mark the string table itself. MarkBit string_table_mark = Marking::MarkBitFrom(string_table); - SetMark(string_table, string_table_mark); + if (!string_table_mark.Get()) { + // String table could have already been marked by visiting the handles list. + SetMark(string_table, string_table_mark); + } // Explicitly mark the prefix. string_table->IteratePrefix(visitor); ProcessMarkingDeque(); @@ -2583,7 +2576,7 @@ void MarkCompactCollector::ClearNonLiveReferences() { if (map_mark.Get()) { ClearNonLiveDependentCode(map->dependent_code()); } else { - ClearAndDeoptimizeDependentCode(map->dependent_code()); + ClearDependentCode(map->dependent_code()); map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array())); } } @@ -2638,7 +2631,7 @@ void MarkCompactCollector::ClearNonLiveReferences() { } ClearNonLiveDependentCode(DependentCode::cast(value)); } else { - ClearAndDeoptimizeDependentCode(DependentCode::cast(value)); + ClearDependentCode(DependentCode::cast(value)); table->set(key_index, heap_->the_hole_value()); table->set(value_index, heap_->the_hole_value()); } @@ -2708,56 +2701,102 @@ void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map, } -void MarkCompactCollector::ClearAndDeoptimizeDependentCode( +void MarkCompactCollector::ClearDependentICList(Object* head) { + Object* current = head; + Object* undefined = heap()->undefined_value(); + while (current != undefined) { + Code* code = Code::cast(current); + if (IsMarked(code)) { + ASSERT(code->is_weak_stub()); + IC::InvalidateMaps(code); + } + current = code->next_code_link(); + code->set_next_code_link(undefined); + } +} + + +void MarkCompactCollector::ClearDependentCode( DependentCode* entries) { DisallowHeapAllocation no_allocation; DependentCode::GroupStartIndexes starts(entries); int number_of_entries = starts.number_of_entries(); if (number_of_entries == 0) return; - for (int i = 0; i < number_of_entries; i++) { + int g = DependentCode::kWeakICGroup; + if (starts.at(g) != starts.at(g + 1)) { + int i = starts.at(g); + ASSERT(i + 1 == starts.at(g + 1)); + Object* head = entries->object_at(i); + ClearDependentICList(head); + } + g = DependentCode::kWeakCodeGroup; + for (int i = starts.at(g); i < starts.at(g + 1); i++) { // If the entry is compilation info then the map must be alive, - // and ClearAndDeoptimizeDependentCode shouldn't be called. + // and ClearDependentCode shouldn't be called. 
ASSERT(entries->is_code_at(i)); Code* code = entries->code_at(i); - if (IsMarked(code) && !code->marked_for_deoptimization()) { code->set_marked_for_deoptimization(true); code->InvalidateEmbeddedObjects(); have_code_to_deoptimize_ = true; } + } + for (int i = 0; i < number_of_entries; i++) { entries->clear_at(i); } } -void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { - DisallowHeapAllocation no_allocation; - DependentCode::GroupStartIndexes starts(entries); - int number_of_entries = starts.number_of_entries(); - if (number_of_entries == 0) return; - int new_number_of_entries = 0; - // Go through all groups, remove dead codes and compact. - for (int g = 0; g < DependentCode::kGroupCount; g++) { - int group_number_of_entries = 0; - for (int i = starts.at(g); i < starts.at(g + 1); i++) { +int MarkCompactCollector::ClearNonLiveDependentCodeInGroup( + DependentCode* entries, int group, int start, int end, int new_start) { + int survived = 0; + if (group == DependentCode::kWeakICGroup) { + // Dependent weak IC stubs form a linked list and only the head is stored + // in the dependent code array. + if (start != end) { + ASSERT(start + 1 == end); + Object* old_head = entries->object_at(start); + MarkCompactWeakObjectRetainer retainer; + Object* head = VisitWeakList<Code>(heap(), old_head, &retainer, true); + entries->set_object_at(new_start, head); + Object** slot = entries->slot_at(new_start); + RecordSlot(slot, slot, head); + // We do not compact this group even if the head is undefined, + // more dependent ICs are likely to be added later. + survived = 1; + } + } else { + for (int i = start; i < end; i++) { Object* obj = entries->object_at(i); ASSERT(obj->IsCode() || IsMarked(obj)); if (IsMarked(obj) && (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) { - if (new_number_of_entries + group_number_of_entries != i) { - entries->set_object_at( - new_number_of_entries + group_number_of_entries, obj); + if (new_start + survived != i) { + entries->set_object_at(new_start + survived, obj); } - Object** slot = entries->slot_at(new_number_of_entries + - group_number_of_entries); + Object** slot = entries->slot_at(new_start + survived); RecordSlot(slot, slot, obj); - group_number_of_entries++; + survived++; } } - entries->set_number_of_entries( - static_cast<DependentCode::DependencyGroup>(g), - group_number_of_entries); - new_number_of_entries += group_number_of_entries; + } + entries->set_number_of_entries( + static_cast<DependentCode::DependencyGroup>(group), survived); + return survived; +} + + +void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { + DisallowHeapAllocation no_allocation; + DependentCode::GroupStartIndexes starts(entries); + int number_of_entries = starts.number_of_entries(); + if (number_of_entries == 0) return; + int new_number_of_entries = 0; + // Go through all groups, remove dead codes and compact. 
+ for (int g = 0; g < DependentCode::kGroupCount; g++) { + int survived = ClearNonLiveDependentCodeInGroup( + entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries); + new_number_of_entries += survived; } for (int i = new_number_of_entries; i < number_of_entries; i++) { entries->clear_at(i); @@ -2988,25 +3027,30 @@ class PointersUpdatingVisitor: public ObjectVisitor { }; -static void UpdatePointer(HeapObject** p, HeapObject* object) { - ASSERT(*p == object); - - Address old_addr = object->address(); - - Address new_addr = Memory::Address_at(old_addr); +static void UpdatePointer(HeapObject** address, HeapObject* object) { + Address new_addr = Memory::Address_at(object->address()); // The new space sweep will overwrite the map word of dead objects // with NULL. In this case we do not need to transfer this entry to // the store buffer which we are rebuilding. + // We perform the pointer update with a no barrier compare-and-swap. The + // compare and swap may fail in the case where the pointer update tries to + // update garbage memory which was concurrently accessed by the sweeper. if (new_addr != NULL) { - *p = HeapObject::FromAddress(new_addr); + NoBarrier_CompareAndSwap( + reinterpret_cast<AtomicWord*>(address), + reinterpret_cast<AtomicWord>(object), + reinterpret_cast<AtomicWord>(HeapObject::FromAddress(new_addr))); } else { // We have to zap this pointer, because the store buffer may overflow later, // and then we have to scan the entire heap and we don't want to find // spurious newspace pointers in the old space. // TODO(mstarzinger): This was changed to a sentinel value to track down // rare crashes, change it back to Smi::FromInt(0) later. - *p = reinterpret_cast<HeapObject*>(Smi::FromInt(0x0f100d00 >> 1)); // flood + NoBarrier_CompareAndSwap( + reinterpret_cast<AtomicWord*>(address), + reinterpret_cast<AtomicWord>(object), + reinterpret_cast<AtomicWord>(Smi::FromInt(0x0f100d00 >> 1))); } } @@ -3025,17 +3069,15 @@ static String* UpdateReferenceInExternalStringTableEntry(Heap* heap, bool MarkCompactCollector::TryPromoteObject(HeapObject* object, int object_size) { - // TODO(hpayer): Replace that check with an assert. - CHECK(object_size <= Page::kMaxRegularHeapObjectSize); + ASSERT(object_size <= Page::kMaxRegularHeapObjectSize); OldSpace* target_space = heap()->TargetSpace(object); ASSERT(target_space == heap()->old_pointer_space() || target_space == heap()->old_data_space()); - Object* result; - MaybeObject* maybe_result = target_space->AllocateRaw(object_size); - if (maybe_result->ToObject(&result)) { - HeapObject* target = HeapObject::cast(result); + HeapObject* target; + AllocationResult allocation = target_space->AllocateRaw(object_size); + if (allocation.To(&target)) { MigrateObject(target, object, object_size, @@ -3106,19 +3148,15 @@ void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) { int size = object->Size(); - MaybeObject* target = space->AllocateRaw(size); - if (target->IsFailure()) { + HeapObject* target_object; + AllocationResult allocation = space->AllocateRaw(size); + if (!allocation.To(&target_object)) { // OS refused to give us memory. 
V8::FatalProcessOutOfMemory("Evacuation"); return; } - Object* target_object = target->ToObjectUnchecked(); - - MigrateObject(HeapObject::cast(target_object), - object, - size, - space->identity()); + MigrateObject(target_object, object, size, space->identity()); ASSERT(object->map_word().IsForwardingAddress()); } @@ -3133,12 +3171,10 @@ void MarkCompactCollector::EvacuatePages() { int npages = evacuation_candidates_.length(); for (int i = 0; i < npages; i++) { Page* p = evacuation_candidates_[i]; - // TODO(hpayer): This check is just used for debugging purpose and - // should be removed or turned into an assert after investigating the - // crash in concurrent sweeping. - CHECK(p->IsEvacuationCandidate() || - p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); - CHECK_EQ(static_cast<int>(p->parallel_sweeping()), 0); + ASSERT(p->IsEvacuationCandidate() || + p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); + ASSERT(static_cast<int>(p->parallel_sweeping()) == + MemoryChunk::PARALLEL_SWEEPING_DONE); if (p->IsEvacuationCandidate()) { // During compaction we might have to request a new page. // Check that space still have room for that. @@ -3152,7 +3188,6 @@ void MarkCompactCollector::EvacuatePages() { slots_buffer_allocator_.DeallocateChain(page->slots_buffer_address()); page->ClearEvacuationCandidate(); page->SetFlag(Page::RESCAN_ON_EVACUATION); - page->InsertAfter(static_cast<PagedSpace*>(page->owner())->anchor()); } return; } @@ -3409,7 +3444,7 @@ void MarkCompactCollector::InvalidateCode(Code* code) { // Return true if the given code is deoptimized or will be deoptimized. bool MarkCompactCollector::WillBeDeoptimized(Code* code) { - return code->marked_for_deoptimization(); + return code->is_optimized_code() && code->marked_for_deoptimization(); } @@ -3616,7 +3651,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { WeakHashTable* table = WeakHashTable::cast(heap_->weak_object_to_code_table()); table->Iterate(&updating_visitor); - table->Rehash(heap_->undefined_value()); + table->Rehash(heap_->isolate()->factory()->undefined_value()); } // Update pointers from external string table. @@ -3643,14 +3678,14 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { } -void MarkCompactCollector::UnlinkEvacuationCandidates() { +void MarkCompactCollector::MoveEvacuationCandidatesToEndOfPagesList() { int npages = evacuation_candidates_.length(); for (int i = 0; i < npages; i++) { Page* p = evacuation_candidates_[i]; if (!p->IsEvacuationCandidate()) continue; p->Unlink(); - p->ClearSweptPrecisely(); - p->ClearSweptConservatively(); + PagedSpace* space = static_cast<PagedSpace*>(p->owner()); + p->InsertAfter(space->LastPage()); } } @@ -3665,7 +3700,7 @@ void MarkCompactCollector::ReleaseEvacuationCandidates() { p->set_scan_on_scavenge(false); slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); p->ResetLiveBytes(); - space->ReleasePage(p, false); + space->ReleasePage(p); } evacuation_candidates_.Rewind(0); compacting_ = false; @@ -3987,10 +4022,7 @@ template<MarkCompactCollector::SweepingParallelism mode> intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, FreeList* free_list, Page* p) { - // TODO(hpayer): This check is just used for debugging purpose and - // should be removed or turned into an assert after investigating the - // crash in concurrent sweeping. 
- CHECK(!p->IsEvacuationCandidate() && !p->WasSwept()); + ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept()); ASSERT((mode == MarkCompactCollector::SWEEP_IN_PARALLEL && free_list != NULL) || (mode == MarkCompactCollector::SWEEP_SEQUENTIALLY && @@ -4092,35 +4124,37 @@ void MarkCompactCollector::SweepInParallel(PagedSpace* space) { free_list->Concatenate(&private_free_list); p->set_parallel_sweeping(MemoryChunk::PARALLEL_SWEEPING_FINALIZE); } + if (p == space->end_of_unswept_pages()) break; } } void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { space->set_was_swept_conservatively(sweeper == CONSERVATIVE || - sweeper == LAZY_CONSERVATIVE || sweeper == PARALLEL_CONSERVATIVE || sweeper == CONCURRENT_CONSERVATIVE); space->ClearStats(); + // We defensively initialize end_of_unswept_pages_ here with the first page + // of the pages list. + space->set_end_of_unswept_pages(space->FirstPage()); + PageIterator it(space); int pages_swept = 0; - bool lazy_sweeping_active = false; bool unused_page_present = false; bool parallel_sweeping_active = false; while (it.has_next()) { Page* p = it.next(); - ASSERT(p->parallel_sweeping() == MemoryChunk::PARALLEL_SWEEPING_DONE); - ASSERT(!p->IsEvacuationCandidate()); // Clear sweeping flags indicating that marking bits are still intact. p->ClearSweptPrecisely(); p->ClearSweptConservatively(); - if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { + if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION) || + p->IsEvacuationCandidate()) { // Will be processed in EvacuateNewSpaceAndCandidates. ASSERT(evacuation_candidates_.length() > 0); continue; @@ -4136,7 +4170,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { // Adjust unswept free bytes because releasing a page expects said // counter to be accurate for unswept pages. space->IncreaseUnsweptFreeBytes(p); - space->ReleasePage(p, true); + space->ReleasePage(p); continue; } unused_page_present = true; @@ -4152,25 +4186,6 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { pages_swept++; break; } - case LAZY_CONSERVATIVE: { - if (lazy_sweeping_active) { - if (FLAG_gc_verbose) { - PrintF("Sweeping 0x%" V8PRIxPTR " lazily postponed.\n", - reinterpret_cast<intptr_t>(p)); - } - space->IncreaseUnsweptFreeBytes(p); - } else { - if (FLAG_gc_verbose) { - PrintF("Sweeping 0x%" V8PRIxPTR " conservatively.\n", - reinterpret_cast<intptr_t>(p)); - } - SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p); - pages_swept++; - space->SetPagesToSweep(p->next_page()); - lazy_sweeping_active = true; - } - break; - } case CONCURRENT_CONSERVATIVE: case PARALLEL_CONSERVATIVE: { if (!parallel_sweeping_active) { @@ -4189,6 +4204,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { p->set_parallel_sweeping(MemoryChunk::PARALLEL_SWEEPING_PENDING); space->IncreaseUnsweptFreeBytes(p); } + space->set_end_of_unswept_pages(p); break; } case PRECISE: { @@ -4231,17 +4247,14 @@ void MarkCompactCollector::SweepSpaces() { #ifdef DEBUG state_ = SWEEP_SPACES; #endif - SweeperType how_to_sweep = - FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE; + SweeperType how_to_sweep = CONSERVATIVE; if (AreSweeperThreadsActivated()) { if (FLAG_parallel_sweeping) how_to_sweep = PARALLEL_CONSERVATIVE; if (FLAG_concurrent_sweeping) how_to_sweep = CONCURRENT_CONSERVATIVE; } if (sweep_precisely_) how_to_sweep = PRECISE; - // Unlink evacuation candidates before sweeper threads access the list of - // pages to avoid race condition. 
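Taken together, the SweepInParallel and SweepSpace hunks above replace lazy sweeping with an explicit bound: evacuation candidates are skipped during sweeping and the parallel sweeper stops once it reaches end_of_unswept_pages. A simplified sketch of that bound, with the page type and the page list reduced to illustrative stand-ins (V8's real list is intrusive):

#include <vector>

struct SketchPage {
  bool pending_sweep;
};

// Walk the page list front to back and stop after the recorded
// end_of_unswept_pages, mirroring the new break in SweepInParallel.
inline void SweepUpTo(const std::vector<SketchPage*>& pages,
                      const SketchPage* end_of_unswept_pages) {
  for (SketchPage* page : pages) {
    if (page->pending_sweep) {
      page->pending_sweep = false;  // stand-in for the actual sweep work
    }
    if (page == end_of_unswept_pages) break;
  }
}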
- UnlinkEvacuationCandidates(); + MoveEvacuationCandidatesToEndOfPagesList(); // Noncompacting collections simply sweep the spaces to clear the mark // bits and free the nonlive blocks (for old and map spaces). We sweep @@ -4256,7 +4269,6 @@ void MarkCompactCollector::SweepSpaces() { if (how_to_sweep == PARALLEL_CONSERVATIVE || how_to_sweep == CONCURRENT_CONSERVATIVE) { - // TODO(hpayer): fix race with concurrent sweeper StartSweeperThreads(); } @@ -4305,12 +4317,10 @@ void MarkCompactCollector::ParallelSweepSpacesComplete() { void MarkCompactCollector::EnableCodeFlushing(bool enable) { -#ifdef ENABLE_DEBUGGER_SUPPORT if (isolate()->debug()->IsLoaded() || isolate()->debug()->has_break_points()) { enable = false; } -#endif if (enable) { if (code_flusher_ != NULL) return; diff --git a/deps/v8/src/mark-compact.h b/deps/v8/src/mark-compact.h index 0ebe8a0f7..254f2589c 100644 --- a/deps/v8/src/mark-compact.h +++ b/deps/v8/src/mark-compact.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MARK_COMPACT_H_ #define V8_MARK_COMPACT_H_ @@ -536,35 +513,6 @@ class ThreadLocalTop; // Mark-Compact collector class MarkCompactCollector { public: - // Type of functions to compute forwarding addresses of objects in - // compacted spaces. Given an object and its size, return a (non-failure) - // Object* that will be the object after forwarding. There is a separate - // allocation function for each (compactable) space based on the location - // of the object before compaction. - typedef MaybeObject* (*AllocationFunction)(Heap* heap, - HeapObject* object, - int object_size); - - // Type of functions to encode the forwarding address for an object. - // Given the object, its size, and the new (non-failure) object it will be - // forwarded to, encode the forwarding address. 
For paged spaces, the - // 'offset' input/output parameter contains the offset of the forwarded - // object from the forwarding address of the previous live object in the - // page as input, and is updated to contain the offset to be used for the - // next live object in the same page. For spaces using a different - // encoding (i.e., contiguous spaces), the offset parameter is ignored. - typedef void (*EncodingFunction)(Heap* heap, - HeapObject* old_object, - int object_size, - Object* new_object, - int* offset); - - // Type of functions to process non-live objects. - typedef void (*ProcessNonLiveFunction)(HeapObject* object, Isolate* isolate); - - // Pointer to member function, used in IterateLiveObjects. - typedef int (MarkCompactCollector::*LiveObjectCallback)(HeapObject* obj); - // Set the global flags, it must be called before Prepare to take effect. inline void SetFlags(int flags); @@ -623,7 +571,6 @@ class MarkCompactCollector { enum SweeperType { CONSERVATIVE, - LAZY_CONSERVATIVE, PARALLEL_CONSERVATIVE, CONCURRENT_CONSERVATIVE, PRECISE @@ -638,7 +585,7 @@ class MarkCompactCollector { void VerifyMarkbitsAreClean(); static void VerifyMarkbitsAreClean(PagedSpace* space); static void VerifyMarkbitsAreClean(NewSpace* space); - void VerifyWeakEmbeddedObjectsInOptimizedCode(); + void VerifyWeakEmbeddedObjectsInCode(); void VerifyOmittedMapChecks(); #endif @@ -724,7 +671,9 @@ class MarkCompactCollector { void WaitUntilSweepingCompleted(); - intptr_t RefillFreeLists(PagedSpace* space); + bool IsSweepingCompleted(); + + void RefillFreeList(PagedSpace* space); bool AreSweeperThreadsActivated(); @@ -743,7 +692,7 @@ class MarkCompactCollector { void MarkWeakObjectToCodeTable(); // Special case for processing weak references in a full collection. We need - // to artifically keep AllocationSites alive for a time. + // to artificially keep AllocationSites alive for a time. void MarkAllocationSite(AllocationSite* site); private: @@ -757,9 +706,6 @@ class MarkCompactCollector { void RemoveDeadInvalidatedCode(); void ProcessInvalidatedCode(ObjectVisitor* visitor); - void UnlinkEvacuationCandidates(); - void ReleaseEvacuationCandidates(); - void StartSweeperThreads(); #ifdef DEBUG @@ -899,8 +845,11 @@ class MarkCompactCollector { void ClearNonLivePrototypeTransitions(Map* map); void ClearNonLiveMapTransitions(Map* map, MarkBit map_mark); - void ClearAndDeoptimizeDependentCode(DependentCode* dependent_code); + void ClearDependentCode(DependentCode* dependent_code); + void ClearDependentICList(Object* head); void ClearNonLiveDependentCode(DependentCode* dependent_code); + int ClearNonLiveDependentCodeInGroup(DependentCode* dependent_code, int group, + int start, int end, int new_start); // Marking detaches initial maps from SharedFunctionInfo objects // to make this reference weak. We need to reattach initial maps @@ -945,6 +894,12 @@ class MarkCompactCollector { void EvacuateNewSpaceAndCandidates(); + void ReleaseEvacuationCandidates(); + + // Moves the pages of the evacuation_candidates_ list to the end of their + // corresponding space pages list. + void MoveEvacuationCandidatesToEndOfPagesList(); + void SweepSpace(PagedSpace* space, SweeperType sweeper); // Finalizes the parallel sweeping phase. Marks all the pages that were diff --git a/deps/v8/src/math.js b/deps/v8/src/math.js index da96d967e..f8738b5f8 100644 --- a/deps/v8/src/math.js +++ b/deps/v8/src/math.js @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file relies on the fact that the following declarations have been made // in runtime.js: @@ -52,24 +29,24 @@ function MathAbs(x) { // ECMA 262 - 15.8.2.2 function MathAcos(x) { - return %Math_acos(TO_NUMBER_INLINE(x)); + return %MathAcos(TO_NUMBER_INLINE(x)); } // ECMA 262 - 15.8.2.3 function MathAsin(x) { - return %Math_asin(TO_NUMBER_INLINE(x)); + return %MathAsin(TO_NUMBER_INLINE(x)); } // ECMA 262 - 15.8.2.4 function MathAtan(x) { - return %Math_atan(TO_NUMBER_INLINE(x)); + return %MathAtan(TO_NUMBER_INLINE(x)); } // ECMA 262 - 15.8.2.5 // The naming of y and x matches the spec, as does the order in which // ToNumber (valueOf) is called. function MathAtan2(y, x) { - return %Math_atan2(TO_NUMBER_INLINE(y), TO_NUMBER_INLINE(x)); + return %MathAtan2(TO_NUMBER_INLINE(y), TO_NUMBER_INLINE(x)); } // ECMA 262 - 15.8.2.6 @@ -85,7 +62,7 @@ function MathCos(x) { // ECMA 262 - 15.8.2.8 function MathExp(x) { - return %Math_exp(TO_NUMBER_INLINE(x)); + return %MathExp(TO_NUMBER_INLINE(x)); } // ECMA 262 - 15.8.2.9 @@ -100,7 +77,7 @@ function MathFloor(x) { // has to be -0, which wouldn't be the case with the shift. return TO_UINT32(x); } else { - return %Math_floor(x); + return %MathFloor(x); } } diff --git a/deps/v8/src/messages.cc b/deps/v8/src/messages.cc index 0077d0309..8c084e266 100644 --- a/deps/v8/src/messages.cc +++ b/deps/v8/src/messages.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -78,7 +55,7 @@ Handle<JSMessageObject> MessageHandler::MakeMessageObject( if (loc) { start = loc->start_pos(); end = loc->end_pos(); - script_handle = GetScriptWrapper(loc->script()); + script_handle = Script::GetWrapper(loc->script()); } Handle<Object> stack_frames_handle = stack_frames.is_null() @@ -107,7 +84,7 @@ void MessageHandler::ReportMessage(Isolate* isolate, // We pass the exception object into the message handler callback though. Object* exception_object = isolate->heap()->undefined_value(); if (isolate->has_pending_exception()) { - isolate->pending_exception()->ToObject(&exception_object); + exception_object = isolate->pending_exception(); } Handle<Object> exception_handle(exception_object, isolate); @@ -154,24 +131,16 @@ Handle<String> MessageHandler::GetMessage(Isolate* isolate, Factory* factory = isolate->factory(); Handle<String> fmt_str = factory->InternalizeOneByteString(STATIC_ASCII_VECTOR("FormatMessage")); - Handle<JSFunction> fun = - Handle<JSFunction>( - JSFunction::cast( - isolate->js_builtins_object()-> - GetPropertyNoExceptionThrown(*fmt_str))); + Handle<JSFunction> fun = Handle<JSFunction>::cast(Object::GetProperty( + isolate->js_builtins_object(), fmt_str).ToHandleChecked()); Handle<JSMessageObject> message = Handle<JSMessageObject>::cast(data); Handle<Object> argv[] = { Handle<Object>(message->type(), isolate), Handle<Object>(message->arguments(), isolate) }; - bool caught_exception; - Handle<Object> result = - Execution::TryCall(fun, - isolate->js_builtins_object(), - ARRAY_SIZE(argv), - argv, - &caught_exception); - - if (caught_exception || !result->IsString()) { + MaybeHandle<Object> maybe_result = Execution::TryCall( + fun, isolate->js_builtins_object(), ARRAY_SIZE(argv), argv); + Handle<Object> result; + if (!maybe_result.ToHandle(&result) || !result->IsString()) { return factory->InternalizeOneByteString(STATIC_ASCII_VECTOR("<error>")); } Handle<String> result_string = Handle<String>::cast(result); @@ -180,7 +149,7 @@ Handle<String> MessageHandler::GetMessage(Isolate* isolate, // here to improve the efficiency of converting it to a C string and // other operations that are likely to take place (see GetLocalizedMessage // for example). 
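GetMessage above now goes through Execution::TryCall returning a MaybeHandle instead of a caught_exception out-parameter, so the empty case has to be checked before the result can be used. A simplified sketch of that checked-result shape (this is an illustration of the pattern, not V8's MaybeHandle):

// A result that may be empty; ToValue() forces callers to branch on the empty
// case, analogous to MaybeHandle<T>::ToHandle in the hunk above.
template <typename T>
class MaybeResult {
 public:
  MaybeResult() : value_(nullptr) {}
  explicit MaybeResult(T* value) : value_(value) {}

  bool ToValue(T** out) const {
    if (value_ == nullptr) return false;
    *out = value_;
    return true;
  }

 private:
  T* value_;  // null encodes "no result" in this sketch
};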
- FlattenString(result_string); + result_string = String::Flatten(result_string); return result_string; } diff --git a/deps/v8/src/messages.h b/deps/v8/src/messages.h index 2f4be518b..297160d98 100644 --- a/deps/v8/src/messages.h +++ b/deps/v8/src/messages.h @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // The infrastructure used for (localized) message reporting in V8. // diff --git a/deps/v8/src/messages.js b/deps/v8/src/messages.js index a389bb8fe..1965da104 100644 --- a/deps/v8/src/messages.js +++ b/deps/v8/src/messages.js @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // ------------------------------------------------------------------- @@ -47,7 +24,8 @@ var kMessages = { incompatible_method_receiver: ["Method ", "%0", " called on incompatible receiver ", "%1"], multiple_defaults_in_switch: ["More than one default clause in switch statement"], newline_after_throw: ["Illegal newline after throw"], - redeclaration: ["%0", " '", "%1", "' has already been declared"], + label_redeclaration: ["Label '", "%0", "' has already been declared"], + var_redeclaration: ["Identifier '", "%0", "' has already been declared"], no_catch_or_finally: ["Missing catch or finally after try"], unknown_label: ["Undefined label '", "%0", "'"], uncaught_exception: ["Uncaught ", "%0"], @@ -99,6 +77,7 @@ var kMessages = { observe_perform_non_string: ["Invalid non-string changeType"], observe_perform_non_function: ["Cannot perform non-function"], observe_notify_non_notifier: ["notify called on non-notifier object"], + observe_global_proxy: ["%0", " cannot be called on the global proxy object"], not_typed_array: ["this is not a typed array."], invalid_argument: ["invalid_argument"], data_view_not_array_buffer: ["First argument to DataView constructor must be an ArrayBuffer"], @@ -153,7 +132,8 @@ var kMessages = { array_indexof_not_defined: ["Array.getIndexOf: Argument undefined"], object_not_extensible: ["Can't add property ", "%0", ", object is not extensible"], illegal_access: ["Illegal access"], - invalid_preparser_data: ["Invalid preparser data for function ", "%0"], + invalid_cached_data_function: ["Invalid cached data for function ", "%0"], + invalid_cached_data: ["Invalid cached data"], strict_mode_with: ["Strict mode code may not include a with statement"], strict_eval_arguments: ["Unexpected eval or arguments in strict mode"], too_many_arguments: ["Too many arguments in function call (only 65535 allowed)"], @@ -198,8 +178,8 @@ function FormatString(format, args) { try { str = NoSideEffectToString(args[arg_num]); if (str.length > 256) { - str = %SubString(str, 0, 239) + "...<omitted>..." + - %SubString(str, str.length - 2, str.length); + str = %_SubString(str, 0, 239) + "...<omitted>..." + + %_SubString(str, str.length - 2, str.length); } } catch (e) { if (%IsJSModule(args[arg_num])) @@ -1155,19 +1135,6 @@ function captureStackTrace(obj, cons_opt) { stackTraceLimit); var error_string = FormatErrorString(obj); - // The holder of this getter ('obj') may not be the receiver ('this'). - // When this getter is called the first time, we use the context values to - // format a stack trace string and turn this accessor pair into a data - // property (on the holder). - var getter = function() { - // Stack is still a raw array awaiting to be formatted. - var result = FormatStackTrace(obj, error_string, GetStackFrames(stack)); - // Turn this accessor into a data property. 
- %DefineOrRedefineDataProperty(obj, 'stack', result, NONE); - // Release context values. - stack = error_string = UNDEFINED; - return result; - }; // Set the 'stack' property on the receiver. If the receiver is the same as // holder of this setter, the accessor pair is turned into a data property. @@ -1180,6 +1147,21 @@ function captureStackTrace(obj, cons_opt) { } }; + // The holder of this getter ('obj') may not be the receiver ('this'). + // When this getter is called the first time, we use the context values to + // format a stack trace string and turn this accessor pair into a data + // property (on the holder). + var getter = function() { + // Stack is still a raw array awaiting to be formatted. + var result = FormatStackTrace(obj, error_string, GetStackFrames(stack)); + // Replace this accessor to return result directly. + %DefineOrRedefineAccessorProperty( + obj, 'stack', function() { return result }, setter, DONT_ENUM); + // Release context values. + stack = error_string = UNDEFINED; + return result; + }; + %DefineOrRedefineAccessorProperty(obj, 'stack', getter, setter, DONT_ENUM); } @@ -1318,6 +1300,15 @@ function SetUpStackOverflowBoilerplate() { var error_string = boilerplate.name + ": " + boilerplate.message; + // Set the 'stack' property on the receiver. If the receiver is the same as + // holder of this setter, the accessor pair is turned into a data property. + var setter = function(v) { + %DefineOrRedefineDataProperty(this, 'stack', v, NONE); + // Tentatively clear the hidden property. If the receiver is the same as + // holder, we release the raw stack trace this way. + %GetAndClearOverflowedStackTrace(this); + }; + // The raw stack trace is stored as a hidden property on the holder of this // getter, which may not be the same as the receiver. Find the holder to // retrieve the raw stack trace and then turn this accessor pair into a @@ -1333,20 +1324,12 @@ function SetUpStackOverflowBoilerplate() { if (IS_UNDEFINED(stack)) return stack; var result = FormatStackTrace(holder, error_string, GetStackFrames(stack)); - // Replace this accessor with a data property. - %DefineOrRedefineDataProperty(holder, 'stack', result, NONE); + // Replace this accessor to return result directly. + %DefineOrRedefineAccessorProperty( + holder, 'stack', function() { return result }, setter, DONT_ENUM); return result; }; - // Set the 'stack' property on the receiver. If the receiver is the same as - // holder of this setter, the accessor pair is turned into a data property. - var setter = function(v) { - %DefineOrRedefineDataProperty(this, 'stack', v, NONE); - // Tentatively clear the hidden property. If the receiver is the same as - // holder, we release the raw stack trace this way. 
- %GetAndClearOverflowedStackTrace(this); - }; - %DefineOrRedefineAccessorProperty( boilerplate, 'stack', getter, setter, DONT_ENUM); diff --git a/deps/v8/src/mips/assembler-mips-inl.h b/deps/v8/src/mips/assembler-mips-inl.h index f7f435413..ba4c4f134 100644 --- a/deps/v8/src/mips/assembler-mips-inl.h +++ b/deps/v8/src/mips/assembler-mips-inl.h @@ -255,7 +255,7 @@ void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) { } -static const int kNoCodeAgeSequenceLength = 7; +static const int kNoCodeAgeSequenceLength = 7 * Assembler::kInstrSize; Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) { @@ -361,14 +361,12 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) { visitor->VisitExternalReference(this); } else if (RelocInfo::IsCodeAgeSequence(mode)) { visitor->VisitCodeAgeSequence(this); -#ifdef ENABLE_DEBUGGER_SUPPORT } else if (((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) || (RelocInfo::IsDebugBreakSlot(mode) && IsPatchedDebugBreakSlotSequence())) && isolate->debug()->has_break_points()) { visitor->VisitDebugTarget(this); -#endif } else if (RelocInfo::IsRuntimeEntry(mode)) { visitor->VisitRuntimeEntry(this); } @@ -388,14 +386,12 @@ void RelocInfo::Visit(Heap* heap) { StaticVisitor::VisitExternalReference(this); } else if (RelocInfo::IsCodeAgeSequence(mode)) { StaticVisitor::VisitCodeAgeSequence(heap, this); -#ifdef ENABLE_DEBUGGER_SUPPORT } else if (heap->isolate()->debug()->has_break_points() && ((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) || (RelocInfo::IsDebugBreakSlot(mode) && IsPatchedDebugBreakSlotSequence()))) { StaticVisitor::VisitDebugTarget(heap, this); -#endif } else if (RelocInfo::IsRuntimeEntry(mode)) { StaticVisitor::VisitRuntimeEntry(this); } diff --git a/deps/v8/src/mips/assembler-mips.cc b/deps/v8/src/mips/assembler-mips.cc index b659559fe..e629868e4 100644 --- a/deps/v8/src/mips/assembler-mips.cc +++ b/deps/v8/src/mips/assembler-mips.cc @@ -102,10 +102,11 @@ const char* DoubleRegister::AllocationIndexToString(int index) { } -void CpuFeatures::Probe() { +void CpuFeatures::Probe(bool serializer_enabled) { unsigned standard_features = (OS::CpuFeaturesImpliedByPlatform() | CpuFeaturesImpliedByCompiler()); - ASSERT(supported_ == 0 || supported_ == standard_features); + ASSERT(supported_ == 0 || + (supported_ & standard_features) == standard_features); #ifdef DEBUG initialized_ = true; #endif @@ -115,7 +116,7 @@ void CpuFeatures::Probe() { // snapshot. supported_ |= standard_features; - if (Serializer::enabled()) { + if (serializer_enabled) { // No probing for features if we might serialize (generate snapshot). return; } @@ -1655,10 +1656,12 @@ void Assembler::lwc1(FPURegister fd, const MemOperand& src) { void Assembler::ldc1(FPURegister fd, const MemOperand& src) { // Workaround for non-8-byte alignment of HeapNumber, convert 64-bit // load to two 32-bit loads. - GenInstrImmediate(LWC1, src.rm(), fd, src.offset_); + GenInstrImmediate(LWC1, src.rm(), fd, src.offset_ + + Register::kMantissaOffset); FPURegister nextfpreg; nextfpreg.setcode(fd.code() + 1); - GenInstrImmediate(LWC1, src.rm(), nextfpreg, src.offset_ + 4); + GenInstrImmediate(LWC1, src.rm(), nextfpreg, src.offset_ + + Register::kExponentOffset); } @@ -1670,10 +1673,12 @@ void Assembler::swc1(FPURegister fd, const MemOperand& src) { void Assembler::sdc1(FPURegister fd, const MemOperand& src) { // Workaround for non-8-byte alignment of HeapNumber, convert 64-bit // store to two 32-bit stores. 
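The kNoCodeAgeSequenceLength change above switches the constant from an instruction count to a byte count; the matching builtins-mips.cc call sites further down adjust their arithmetic so the addresses they compute are unchanged. A compile-time check of that equivalence (constants mirror the diff; the instruction width is 4 bytes on MIPS):

constexpr int kInstrSize = 4;                      // MIPS instruction width
constexpr int kOldLengthInInstructions = 7;        // previous definition
constexpr int kNewLengthInBytes = 7 * kInstrSize;  // new definition

// Old call site: (length - 1) * kInstrSize.  New call site: length - kInstrSize.
static_assert((kOldLengthInInstructions - 1) * kInstrSize ==
                  kNewLengthInBytes - kInstrSize,
              "the code-age call sites still point at the same byte offset");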
- GenInstrImmediate(SWC1, src.rm(), fd, src.offset_); + GenInstrImmediate(SWC1, src.rm(), fd, src.offset_ + + Register::kMantissaOffset); FPURegister nextfpreg; nextfpreg.setcode(fd.code() + 1); - GenInstrImmediate(SWC1, src.rm(), nextfpreg, src.offset_ + 4); + GenInstrImmediate(SWC1, src.rm(), nextfpreg, src.offset_ + + Register::kExponentOffset); } @@ -2075,12 +2080,7 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { if (!RelocInfo::IsNone(rinfo.rmode())) { // Don't record external references unless the heap will be serialized. if (rmode == RelocInfo::EXTERNAL_REFERENCE) { -#ifdef DEBUG - if (!Serializer::enabled()) { - Serializer::TooLateToEnableNow(); - } -#endif - if (!Serializer::enabled() && !emit_debug_code()) { + if (!Serializer::enabled(isolate()) && !emit_debug_code()) { return; } } @@ -2328,16 +2328,17 @@ void Assembler::JumpLabelToJumpRegister(Address pc) { } -MaybeObject* Assembler::AllocateConstantPool(Heap* heap) { +Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { // No out-of-line constant pool support. - UNREACHABLE(); - return NULL; + ASSERT(!FLAG_enable_ool_constant_pool); + return isolate->factory()->empty_constant_pool_array(); } void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { // No out-of-line constant pool support. - UNREACHABLE(); + ASSERT(!FLAG_enable_ool_constant_pool); + return; } diff --git a/deps/v8/src/mips/assembler-mips.h b/deps/v8/src/mips/assembler-mips.h index ea956e135..860097c8a 100644 --- a/deps/v8/src/mips/assembler-mips.h +++ b/deps/v8/src/mips/assembler-mips.h @@ -77,6 +77,16 @@ struct Register { static const int kSizeInBytes = 4; static const int kCpRegister = 23; // cp (s7) is the 23rd register. +#if defined(V8_TARGET_LITTLE_ENDIAN) + static const int kMantissaOffset = 0; + static const int kExponentOffset = 4; +#elif defined(V8_TARGET_BIG_ENDIAN) + static const int kMantissaOffset = 4; + static const int kExponentOffset = 0; +#else +#error Unknown endianness +#endif + inline static int NumAllocatableRegisters(); static int ToAllocationIndex(Register reg) { @@ -415,7 +425,10 @@ class CpuFeatures : public AllStatic { public: // Detect features of the target CPU. Set safe defaults if the serializer // is enabled (snapshots must be portable). - static void Probe(); + static void Probe(bool serializer_enabled); + + // A special case for printing target and features, which we want to do + // before initializing the isolate // Check whether a feature is supported by the target CPU. 
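The ldc1/sdc1 and DoubleToIStub hunks above now address the two halves of a HeapNumber through Register::kMantissaOffset and Register::kExponentOffset, which the new assembler-mips.h block defines per target endianness. A small host-side illustration of why the word holding the exponent moves (names below are illustrative):

#include <cstdint>
#include <cstring>

struct DoubleWords {
  uint32_t word0;  // bytes 0..3 of the stored double
  uint32_t word1;  // bytes 4..7 of the stored double
};

// Copy a double's storage into two 32-bit words. On a little-endian target the
// sign/exponent bits land in word1 (offset 4); on a big-endian target they land
// in word0 (offset 0), which is why kExponentOffset above is 4 or 0 depending
// on V8_TARGET_LITTLE_ENDIAN / V8_TARGET_BIG_ENDIAN.
inline DoubleWords SplitDouble(double value) {
  DoubleWords words;
  std::memcpy(&words, &value, sizeof value);
  return words;
}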
static bool IsSupported(CpuFeature f) { @@ -423,15 +436,11 @@ class CpuFeatures : public AllStatic { return Check(f, supported_); } - static bool IsFoundByRuntimeProbingOnly(CpuFeature f) { - ASSERT(initialized_); - return Check(f, found_by_runtime_probing_only_); - } - - static bool IsSafeForSnapshot(CpuFeature f) { + static bool IsSafeForSnapshot(Isolate* isolate, CpuFeature f) { return Check(f, cross_compile_) || (IsSupported(f) && - (!Serializer::enabled() || !IsFoundByRuntimeProbingOnly(f))); + !(Serializer::enabled(isolate) && + Check(f, found_by_runtime_probing_only_))); } static bool VerifyCrossCompiling() { @@ -444,6 +453,8 @@ class CpuFeatures : public AllStatic { (cross_compile_ & mask) == mask; } + static bool SupportsCrankshaft() { return CpuFeatures::IsSupported(FPU); } + private: static bool Check(CpuFeature f, unsigned set) { return (set & flag2set(f)) != 0; @@ -1007,7 +1018,7 @@ class Assembler : public AssemblerBase { void CheckTrampolinePool(); // Allocate a constant pool of the correct size for the generated code. - MaybeObject* AllocateConstantPool(Heap* heap); + Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate); // Generate the constant pool for the generated code. void PopulateConstantPool(ConstantPoolArray* constant_pool); diff --git a/deps/v8/src/mips/builtins-mips.cc b/deps/v8/src/mips/builtins-mips.cc index 03d6cc80d..fdd062b6b 100644 --- a/deps/v8/src/mips/builtins-mips.cc +++ b/deps/v8/src/mips/builtins-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. @@ -391,13 +368,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // the preconditions is not met, the code bails out to the runtime call. 
if (FLAG_inline_new) { Label undo_allocation; -#ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference debug_step_in_fp = ExternalReference::debug_step_in_fp_address(isolate); __ li(a2, Operand(debug_step_in_fp)); __ lw(a2, MemOperand(a2)); __ Branch(&rt_call, ne, a2, Operand(zero_reg)); -#endif // Load the initial map and verify that it is in fact a map. // a1: constructor function @@ -470,9 +445,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, if (count_constructions) { __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); - __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset)); - __ Ext(a0, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, - kBitsPerByte); + __ lbu(a0, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset)); __ sll(at, a0, kPointerSizeLog2); __ addu(a0, t5, at); __ sll(at, a3, kPointerSizeLog2); @@ -525,12 +498,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset)); // The field instance sizes contains both pre-allocated property fields // and in-object properties. - __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset)); - __ Ext(t6, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, - kBitsPerByte); + __ lbu(t6, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset)); __ Addu(a3, a3, Operand(t6)); - __ Ext(t6, a0, Map::kInObjectPropertiesByte * kBitsPerByte, - kBitsPerByte); + __ lbu(t6, FieldMemOperand(a2, Map::kInObjectPropertiesOffset)); __ subu(a3, a3, t6); // Done if no extra properties are to be allocated. @@ -829,7 +799,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, if (is_construct) { // No type feedback cell is available __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); - CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); + CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); __ CallStub(&stub); } else { ParameterCount actual(a0); @@ -895,7 +865,7 @@ static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { // Set a0 to point to the head of the PlatformCodeAge sequence. __ Subu(a0, a0, - Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize)); + Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); // The following registers must be saved and restored when calling through to // the runtime: @@ -934,7 +904,7 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { // Set a0 to point to the head of the PlatformCodeAge sequence. __ Subu(a0, a0, - Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize)); + Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); // The following registers must be saved and restored when calling through to // the runtime: @@ -956,7 +926,7 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); // Jump to point after the code-age stub. - __ Addu(a0, a0, Operand((kNoCodeAgeSequenceLength) * Assembler::kInstrSize)); + __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength)); __ Jump(a0); } @@ -1305,7 +1275,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { // Out of stack space. __ lw(a1, MemOperand(fp, kFunctionOffset)); __ Push(a1, v0); - __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); // End of stack check. // Push current limit and index. 
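The ArgumentsAdaptorTrampoline changes in the next hunks add ArgumentAdaptorStackCheck, which tests the real stack limit before copying the expected arguments and falls back to Builtins::STACK_OVERFLOW when they would not fit. The test reduces to the sketch below; signed arithmetic matters because sp may already sit below the limit, as the "Signed comparison" comment in the hunk notes:

#include <cstdint>

// space_left = sp - real_stack_limit; overflow if the expected arguments, at
// one pointer-sized slot each, do not fit. Mirrors the sll/Branch pair in
// ArgumentAdaptorStackCheck; a downward-growing stack is assumed.
inline bool AdaptorWouldOverflowStack(intptr_t sp,
                                      intptr_t real_stack_limit,
                                      intptr_t expected_argument_count) {
  intptr_t space_left = sp - real_stack_limit;  // negative if already past it
  intptr_t needed =
      expected_argument_count * static_cast<intptr_t>(sizeof(void*));
  return space_left <= needed;
}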
@@ -1426,6 +1396,27 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { } +static void ArgumentAdaptorStackCheck(MacroAssembler* masm, + Label* stack_overflow) { + // ----------- S t a t e ------------- + // -- a0 : actual number of arguments + // -- a1 : function (passed through to callee) + // -- a2 : expected number of arguments + // ----------------------------------- + // Check the stack for overflow. We are not trying to catch + // interruptions (e.g. debug break and preemption) here, so the "real stack + // limit" is checked. + __ LoadRoot(t1, Heap::kRealStackLimitRootIndex); + // Make t1 the space we have left. The stack might already be overflowed + // here which will cause t1 to become negative. + __ subu(t1, sp, t1); + // Check if the arguments will overflow the stack. + __ sll(at, a2, kPointerSizeLog2); + // Signed comparison. + __ Branch(stack_overflow, le, t1, Operand(at)); +} + + static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { __ sll(a0, a0, kSmiTagSize); __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); @@ -1460,6 +1451,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // -- a2: expected arguments count // ----------------------------------- + Label stack_overflow; + ArgumentAdaptorStackCheck(masm, &stack_overflow); Label invoke, dont_adapt_arguments; Label enough, too_few; @@ -1568,6 +1561,14 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // ------------------------------------------- __ bind(&dont_adapt_arguments); __ Jump(a3); + + __ bind(&stack_overflow); + { + FrameScope frame(masm, StackFrame::MANUAL); + EnterArgumentsAdaptorFrame(masm); + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); + __ break_(0xCC); + } } diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc index 332ed4b6a..79af21940 100644 --- a/deps/v8/src/mips/code-stubs-mips.cc +++ b/deps/v8/src/mips/code-stubs-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -40,7 +17,6 @@ namespace internal { void FastNewClosureStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a2 }; descriptor->register_param_count_ = 1; @@ -51,7 +27,6 @@ void FastNewClosureStub::InitializeInterfaceDescriptor( void FastNewContextStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a1 }; descriptor->register_param_count_ = 1; @@ -61,7 +36,6 @@ void FastNewContextStub::InitializeInterfaceDescriptor( void ToNumberStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a0 }; descriptor->register_param_count_ = 1; @@ -71,7 +45,6 @@ void ToNumberStub::InitializeInterfaceDescriptor( void NumberToStringStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a0 }; descriptor->register_param_count_ = 1; @@ -82,7 +55,6 @@ void NumberToStringStub::InitializeInterfaceDescriptor( void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a3, a2, a1 }; descriptor->register_param_count_ = 3; @@ -94,7 +66,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a3, a2, a1, a0 }; descriptor->register_param_count_ = 4; @@ -105,7 +76,6 @@ void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( void CreateAllocationSiteStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a2, a3 }; descriptor->register_param_count_ = 2; @@ -115,7 +85,6 @@ void CreateAllocationSiteStub::InitializeInterfaceDescriptor( void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a1, a0 }; descriptor->register_param_count_ = 2; @@ -126,7 +95,6 @@ void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = {a1, a0 }; descriptor->register_param_count_ = 2; @@ -137,7 +105,6 @@ void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( void RegExpConstructResultStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a2, a1, a0 }; descriptor->register_param_count_ = 3; @@ -148,7 +115,6 @@ void RegExpConstructResultStub::InitializeInterfaceDescriptor( void 
LoadFieldStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a0 }; descriptor->register_param_count_ = 1; @@ -158,7 +124,6 @@ void LoadFieldStub::InitializeInterfaceDescriptor( void KeyedLoadFieldStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a1 }; descriptor->register_param_count_ = 1; @@ -168,7 +133,6 @@ void KeyedLoadFieldStub::InitializeInterfaceDescriptor( void StringLengthStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a0, a2 }; descriptor->register_param_count_ = 2; @@ -178,7 +142,6 @@ void StringLengthStub::InitializeInterfaceDescriptor( void KeyedStringLengthStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a1, a0 }; descriptor->register_param_count_ = 2; @@ -188,7 +151,6 @@ void KeyedStringLengthStub::InitializeInterfaceDescriptor( void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a2, a1, a0 }; descriptor->register_param_count_ = 3; @@ -199,7 +161,6 @@ void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( void TransitionElementsKindStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a0, a1 }; descriptor->register_param_count_ = 2; @@ -211,7 +172,6 @@ void TransitionElementsKindStub::InitializeInterfaceDescriptor( void CompareNilICStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a0 }; descriptor->register_param_count_ = 1; @@ -219,12 +179,11 @@ void CompareNilICStub::InitializeInterfaceDescriptor( descriptor->deoptimization_handler_ = FUNCTION_ADDR(CompareNilIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate())); } static void InitializeArrayConstructorDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor, int constant_stack_parameter_count) { // register state @@ -253,7 +212,6 @@ static void InitializeArrayConstructorDescriptor( static void InitializeInternalArrayConstructorDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor, int constant_stack_parameter_count) { // register state @@ -281,28 +239,24 @@ static void InitializeInternalArrayConstructorDescriptor( void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, 0); + InitializeArrayConstructorDescriptor(descriptor, 0); } void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, 1); + InitializeArrayConstructorDescriptor(descriptor, 1); } void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, -1); + InitializeArrayConstructorDescriptor(descriptor, -1); } void ToBooleanStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static 
Register registers[] = { a0 }; descriptor->register_param_count_ = 1; @@ -310,33 +264,29 @@ void ToBooleanStub::InitializeInterfaceDescriptor( descriptor->deoptimization_handler_ = FUNCTION_ADDR(ToBooleanIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate())); } void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0); + InitializeInternalArrayConstructorDescriptor(descriptor, 0); } void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1); + InitializeInternalArrayConstructorDescriptor(descriptor, 1); } void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1); + InitializeInternalArrayConstructorDescriptor(descriptor, -1); } void StoreGlobalStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a1, a2, a0 }; descriptor->register_param_count_ = 3; @@ -347,7 +297,6 @@ void StoreGlobalStub::InitializeInterfaceDescriptor( void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a0, a3, a1, a2 }; descriptor->register_param_count_ = 4; @@ -358,19 +307,17 @@ void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( void BinaryOpICStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a1, a0 }; descriptor->register_param_count_ = 2; descriptor->register_params_ = registers; descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate())); } void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a2, a1, a0 }; descriptor->register_param_count_ = 3; @@ -381,7 +328,6 @@ void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor( void StringAddStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { a1, a0 }; descriptor->register_param_count_ = 2; @@ -494,10 +440,9 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { // Update the static counter each time a new code stub is generated. - Isolate* isolate = masm->isolate(); - isolate->counters()->code_stubs()->Increment(); + isolate()->counters()->code_stubs()->Increment(); - CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); + CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(); int param_count = descriptor->register_param_count_; { // Call the runtime system in a fresh internal frame. @@ -526,11 +471,13 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { // stub so you don't have to set up the frame. 
class ConvertToDoubleStub : public PlatformCodeStub { public: - ConvertToDoubleStub(Register result_reg_1, + ConvertToDoubleStub(Isolate* isolate, + Register result_reg_1, Register result_reg_2, Register source_reg, Register scratch_reg) - : result1_(result_reg_1), + : PlatformCodeStub(isolate), + result1_(result_reg_1), result2_(result_reg_2), source_(source_reg), zeros_(scratch_reg) { } @@ -559,13 +506,14 @@ class ConvertToDoubleStub : public PlatformCodeStub { void ConvertToDoubleStub::Generate(MacroAssembler* masm) { -#ifndef BIG_ENDIAN_FLOATING_POINT - Register exponent = result1_; - Register mantissa = result2_; -#else - Register exponent = result2_; - Register mantissa = result1_; -#endif + Register exponent, mantissa; + if (kArchEndian == kLittle) { + exponent = result1_; + mantissa = result2_; + } else { + exponent = result2_; + mantissa = result1_; + } Label not_special; // Convert from Smi to integer. __ sra(source_, source_, kSmiTagSize); @@ -671,8 +619,10 @@ void DoubleToIStub::Generate(MacroAssembler* masm) { Register input_high = scratch2; Register input_low = scratch3; - __ lw(input_low, MemOperand(input_reg, double_offset)); - __ lw(input_high, MemOperand(input_reg, double_offset + kIntSize)); + __ lw(input_low, + MemOperand(input_reg, double_offset + Register::kMantissaOffset)); + __ lw(input_high, + MemOperand(input_reg, double_offset + Register::kExponentOffset)); Label normal_exponent, restore_sign; // Extract the biased exponent in result. @@ -758,10 +708,10 @@ void DoubleToIStub::Generate(MacroAssembler* masm) { void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime( Isolate* isolate) { - WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3); - WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0); - stub1.GetCode(isolate); - stub2.GetCode(isolate); + WriteInt32ToHeapNumberStub stub1(isolate, a1, v0, a2, a3); + WriteInt32ToHeapNumberStub stub2(isolate, a2, v0, a3, a0); + stub1.GetCode(); + stub2.GetCode(); } @@ -1151,8 +1101,6 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { // f12, f14 are the double representations of the left hand side // and the right hand side if we have FPU. Otherwise a2, a3 represent // left hand side and a0, a1 represent right hand side. 
- - Isolate* isolate = masm->isolate(); Label nan; __ li(t0, Operand(LESS)); __ li(t1, Operand(GREATER)); @@ -1227,7 +1175,8 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, a2, a3, &slow); - __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3); + __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2, + a3); if (cc == eq) { StringCompareStub::GenerateFlatAsciiStringEquals(masm, lhs, @@ -1315,9 +1264,9 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { AllowExternalCallThatCantCauseGC scope(masm); __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); - __ li(a0, Operand(ExternalReference::isolate_address(masm->isolate()))); + __ li(a0, Operand(ExternalReference::isolate_address(isolate()))); __ CallCFunction( - ExternalReference::store_buffer_overflow_function(masm->isolate()), + ExternalReference::store_buffer_overflow_function(isolate()), argument_count); if (save_doubles_ == kSaveFPRegs) { __ MultiPopFPU(kCallerSavedFPU); @@ -1446,7 +1395,7 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ PrepareCallCFunction(0, 2, scratch2); __ MovToFloatParameters(double_base, double_exponent); __ CallCFunction( - ExternalReference::power_double_double_function(masm->isolate()), + ExternalReference::power_double_double_function(isolate()), 0, 2); } __ pop(ra); @@ -1507,11 +1456,11 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ cvt_d_w(double_exponent, single_scratch); // Returning or bailing out. - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); if (exponent_type_ == ON_STACK) { // The arguments are still on the stack. __ bind(&call_runtime); - __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); + __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); // The stub is called from non-optimized code, which expects the result // as heap number in exponent. @@ -1530,7 +1479,7 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ PrepareCallCFunction(0, 2, scratch); __ MovToFloatParameters(double_base, double_exponent); __ CallCFunction( - ExternalReference::power_double_double_function(masm->isolate()), + ExternalReference::power_double_double_function(isolate()), 0, 2); } __ pop(ra); @@ -1564,77 +1513,75 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { void StoreRegistersStateStub::GenerateAheadOfTime( Isolate* isolate) { - StoreRegistersStateStub stub1(kDontSaveFPRegs); - stub1.GetCode(isolate); + StoreRegistersStateStub stub1(isolate, kDontSaveFPRegs); + stub1.GetCode(); // Hydrogen code stubs need stub2 at snapshot time. - StoreRegistersStateStub stub2(kSaveFPRegs); - stub2.GetCode(isolate); + StoreRegistersStateStub stub2(isolate, kSaveFPRegs); + stub2.GetCode(); } void RestoreRegistersStateStub::GenerateAheadOfTime( Isolate* isolate) { - RestoreRegistersStateStub stub1(kDontSaveFPRegs); - stub1.GetCode(isolate); + RestoreRegistersStateStub stub1(isolate, kDontSaveFPRegs); + stub1.GetCode(); // Hydrogen code stubs need stub2 at snapshot time. 
- RestoreRegistersStateStub stub2(kSaveFPRegs); - stub2.GetCode(isolate); + RestoreRegistersStateStub stub2(isolate, kSaveFPRegs); + stub2.GetCode(); } void CodeStub::GenerateFPStubs(Isolate* isolate) { SaveFPRegsMode mode = kSaveFPRegs; - CEntryStub save_doubles(1, mode); - StoreBufferOverflowStub stub(mode); + CEntryStub save_doubles(isolate, 1, mode); + StoreBufferOverflowStub stub(isolate, mode); // These stubs might already be in the snapshot, detect that and don't // regenerate, which would lead to code stub initialization state being messed // up. Code* save_doubles_code; - if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { - save_doubles_code = *save_doubles.GetCode(isolate); + if (!save_doubles.FindCodeInCache(&save_doubles_code)) { + save_doubles_code = *save_doubles.GetCode(); } Code* store_buffer_overflow_code; - if (!stub.FindCodeInCache(&store_buffer_overflow_code, isolate)) { - store_buffer_overflow_code = *stub.GetCode(isolate); + if (!stub.FindCodeInCache(&store_buffer_overflow_code)) { + store_buffer_overflow_code = *stub.GetCode(); } isolate->set_fp_stubs_generated(true); } void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { - CEntryStub stub(1, kDontSaveFPRegs); - stub.GetCode(isolate); + CEntryStub stub(isolate, 1, kDontSaveFPRegs); + stub.GetCode(); } -void CEntryStub::GenerateCore(MacroAssembler* masm, - Label* throw_normal_exception, - Label* throw_termination_exception, - bool do_gc, - bool always_allocate) { - // v0: result parameter for PerformGC, if any - // s0: number of arguments including receiver (C callee-saved) - // s1: pointer to the first argument (C callee-saved) - // s2: pointer to builtin function (C callee-saved) +void CEntryStub::Generate(MacroAssembler* masm) { + // Called from JavaScript; parameters are on stack as if calling JS function + // s0: number of arguments including receiver + // s1: size of arguments excluding receiver + // s2: pointer to builtin function + // fp: frame pointer (restored after C call) + // sp: stack pointer (restored as callee's sp after C call) + // cp: current context (C callee-saved) - Isolate* isolate = masm->isolate(); + ProfileEntryHookStub::MaybeCallEntryHook(masm); - if (do_gc) { - // Move result passed in v0 into a0 to call PerformGC. - __ mov(a0, v0); - __ PrepareCallCFunction(2, 0, a1); - __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); - __ CallCFunction(ExternalReference::perform_gc_function(isolate), 2, 0); - } + // NOTE: s0-s2 hold the arguments of this function instead of a0-a2. + // The reason for this is that these arguments would need to be saved anyway + // so it's faster to set them up directly. + // See MacroAssembler::PrepareCEntryArgs and PrepareCEntryFunction. - ExternalReference scope_depth = - ExternalReference::heap_always_allocate_scope_depth(isolate); - if (always_allocate) { - __ li(a0, Operand(scope_depth)); - __ lw(a1, MemOperand(a0)); - __ Addu(a1, a1, Operand(1)); - __ sw(a1, MemOperand(a0)); - } + // Compute the argv pointer in a callee-saved register. + __ Addu(s1, sp, s1); + + // Enter the exit frame that transitions from JavaScript to C++. + FrameScope scope(masm, StackFrame::MANUAL); + __ EnterExitFrame(save_doubles_); + + // s0: number of arguments including receiver (C callee-saved) + // s1: pointer to first argument (C callee-saved) + // s2: pointer to builtin function (C callee-saved) // Prepare arguments for C routine. 
// a0 = argc @@ -1646,7 +1593,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ AssertStackIsAligned(); - __ li(a2, Operand(ExternalReference::isolate_address(isolate))); + __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); // To let the GC traverse the return address of the exit frames, we need to // know where the return address is. The CEntryStub is unmovable, so @@ -1681,130 +1628,67 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, masm->InstructionsGeneratedSince(&find_ra)); } - if (always_allocate) { - // It's okay to clobber a2 and a3 here. v0 & v1 contain result. - __ li(a2, Operand(scope_depth)); - __ lw(a3, MemOperand(a2)); - __ Subu(a3, a3, Operand(1)); - __ sw(a3, MemOperand(a2)); + + // Runtime functions should not return 'the hole'. Allowing it to escape may + // lead to crashes in the IC code later. + if (FLAG_debug_code) { + Label okay; + __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); + __ Branch(&okay, ne, v0, Operand(t0)); + __ stop("The hole escaped"); + __ bind(&okay); } - // Check for failure result. - Label failure_returned; - STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); - __ addiu(a2, v0, 1); - __ andi(t0, a2, kFailureTagMask); - __ Branch(USE_DELAY_SLOT, &failure_returned, eq, t0, Operand(zero_reg)); - // Restore stack (remove arg slots) in branch delay slot. - __ addiu(sp, sp, kCArgsSlotsSize); + // Check result for exception sentinel. + Label exception_returned; + __ LoadRoot(t0, Heap::kExceptionRootIndex); + __ Branch(&exception_returned, eq, t0, Operand(v0)); + + ExternalReference pending_exception_address( + Isolate::kPendingExceptionAddress, isolate()); + // Check that there is no pending exception, otherwise we + // should have returned the exception sentinel. + if (FLAG_debug_code) { + Label okay; + __ li(a2, Operand(pending_exception_address)); + __ lw(a2, MemOperand(a2)); + __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); + // Cannot use check here as it attempts to generate call into runtime. + __ Branch(&okay, eq, t0, Operand(a2)); + __ stop("Unexpected pending exception"); + __ bind(&okay); + } // Exit C frame and return. // v0:v1: result // sp: stack pointer // fp: frame pointer + // s0: still holds argc (callee-saved). __ LeaveExitFrame(save_doubles_, s0, true, EMIT_RETURN); - // Check if we should retry or throw exception. - Label retry; - __ bind(&failure_returned); - STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); - __ andi(t0, v0, ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize); - __ Branch(&retry, eq, t0, Operand(zero_reg)); + // Handling of exception. + __ bind(&exception_returned); // Retrieve the pending exception. - __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); - __ lw(v0, MemOperand(t0)); + __ li(a2, Operand(pending_exception_address)); + __ lw(v0, MemOperand(a2)); // Clear the pending exception. - __ li(a3, Operand(isolate->factory()->the_hole_value())); - __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); - __ sw(a3, MemOperand(t0)); + __ li(a3, Operand(isolate()->factory()->the_hole_value())); + __ sw(a3, MemOperand(a2)); // Special handling of termination exceptions which are uncatchable // by javascript code. + Label throw_termination_exception; __ LoadRoot(t0, Heap::kTerminationExceptionRootIndex); - __ Branch(throw_termination_exception, eq, v0, Operand(t0)); + __ Branch(&throw_termination_exception, eq, v0, Operand(t0)); // Handle normal exception. 
- __ jmp(throw_normal_exception); - - __ bind(&retry); - // Last failure (v0) will be moved to (a0) for parameter when retrying. -} - - -void CEntryStub::Generate(MacroAssembler* masm) { - // Called from JavaScript; parameters are on stack as if calling JS function - // s0: number of arguments including receiver - // s1: size of arguments excluding receiver - // s2: pointer to builtin function - // fp: frame pointer (restored after C call) - // sp: stack pointer (restored as callee's sp after C call) - // cp: current context (C callee-saved) - - ProfileEntryHookStub::MaybeCallEntryHook(masm); - - // NOTE: Invocations of builtins may return failure objects - // instead of a proper result. The builtin entry handles - // this by performing a garbage collection and retrying the - // builtin once. - - // NOTE: s0-s2 hold the arguments of this function instead of a0-a2. - // The reason for this is that these arguments would need to be saved anyway - // so it's faster to set them up directly. - // See MacroAssembler::PrepareCEntryArgs and PrepareCEntryFunction. - - // Compute the argv pointer in a callee-saved register. - __ Addu(s1, sp, s1); - - // Enter the exit frame that transitions from JavaScript to C++. - FrameScope scope(masm, StackFrame::MANUAL); - __ EnterExitFrame(save_doubles_); - - // s0: number of arguments (C callee-saved) - // s1: pointer to first argument (C callee-saved) - // s2: pointer to builtin function (C callee-saved) - - Label throw_normal_exception; - Label throw_termination_exception; - - // Call into the runtime system. - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - false, - false); - - // Do space-specific GC and retry runtime call. - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - true, - false); - - // Do full GC and retry runtime call one final time. - Failure* failure = Failure::InternalError(); - __ li(v0, Operand(reinterpret_cast<int32_t>(failure))); - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - true, - true); - - { FrameScope scope(masm, StackFrame::MANUAL); - __ PrepareCallCFunction(0, v0); - __ CallCFunction( - ExternalReference::out_of_memory_function(masm->isolate()), 0); - } + __ Throw(v0); __ bind(&throw_termination_exception); __ ThrowUncatchable(v0); - - __ bind(&throw_normal_exception); - __ Throw(v0); } @@ -1896,7 +1780,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress, isolate))); __ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0. - __ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception()))); + __ LoadRoot(v0, Heap::kExceptionRootIndex); __ b(&exit); // b exposes branch delay slot. __ nop(); // Branch delay slot nop. @@ -2121,7 +2005,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ Branch(&object_not_null, ne, scratch, - Operand(masm->isolate()->factory()->null_value())); + Operand(isolate()->factory()->null_value())); __ li(v0, Operand(Smi::FromInt(1))); __ DropAndRet(HasArgsInRegisters() ? 
0 : 2); @@ -2169,7 +2053,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) { // -- a1 : receiver // ----------------------------------- __ Branch(&miss, ne, a0, - Operand(masm->isolate()->factory()->prototype_string())); + Operand(isolate()->factory()->prototype_string())); receiver = a1; } else { ASSERT(kind() == Code::LOAD_IC); @@ -2610,8 +2494,6 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { const int kSubjectOffset = 2 * kPointerSize; const int kJSRegExpOffset = 3 * kPointerSize; - Isolate* isolate = masm->isolate(); - Label runtime; // Allocation of registers for this function. These are in callee save // registers and will be preserved by the call to the native RegExp code, as @@ -2626,9 +2508,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Ensure that a RegExp stack is allocated. ExternalReference address_of_regexp_stack_memory_address = ExternalReference::address_of_regexp_stack_memory_address( - isolate); + isolate()); ExternalReference address_of_regexp_stack_memory_size = - ExternalReference::address_of_regexp_stack_memory_size(isolate); + ExternalReference::address_of_regexp_stack_memory_size(isolate()); __ li(a0, Operand(address_of_regexp_stack_memory_size)); __ lw(a0, MemOperand(a0, 0)); __ Branch(&runtime, eq, a0, Operand(zero_reg)); @@ -2775,7 +2657,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // subject: Subject string // regexp_data: RegExp data (FixedArray) // All checks done. Now push arguments for native regexp code. - __ IncrementCounter(isolate->counters()->regexp_entry_native(), + __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, a0, a2); // Isolates: note we add an additional parameter here (isolate pointer). @@ -2799,7 +2681,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Argument 9: Pass current isolate address. // CFunctionArgumentOperand handles MIPS stack argument slots. - __ li(a0, Operand(ExternalReference::isolate_address(isolate))); + __ li(a0, Operand(ExternalReference::isolate_address(isolate()))); __ sw(a0, MemOperand(sp, 5 * kPointerSize)); // Argument 8: Indicate that this is a direct call from JavaScript. @@ -2821,7 +2703,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Argument 5: static offsets vector buffer. __ li(a0, Operand( - ExternalReference::address_of_static_offsets_vector(isolate))); + ExternalReference::address_of_static_offsets_vector(isolate()))); __ sw(a0, MemOperand(sp, 1 * kPointerSize)); // For arguments 4 and 3 get string length, calculate start of string data @@ -2854,7 +2736,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Locate the code entry and call it. __ Addu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag)); - DirectCEntryStub stub; + DirectCEntryStub stub(isolate()); stub.GenerateCall(masm, t9); __ LeaveExitFrame(false, no_reg, true); @@ -2876,9 +2758,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // stack overflow (on the backtrack stack) was detected in RegExp code but // haven't created the exception yet. Handle that in the runtime system. // TODO(592): Rerunning the RegExp to get the stack overflow exception. 
- __ li(a1, Operand(isolate->factory()->the_hole_value())); + __ li(a1, Operand(isolate()->factory()->the_hole_value())); __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, - isolate))); + isolate()))); __ lw(v0, MemOperand(a2, 0)); __ Branch(&runtime, eq, v0, Operand(a1)); @@ -2896,7 +2778,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { __ bind(&failure); // For failure and exception return null. - __ li(v0, Operand(isolate->factory()->null_value())); + __ li(v0, Operand(isolate()->factory()->null_value())); __ DropAndRet(4); // Process the result from the native regexp code. @@ -2957,7 +2839,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Get the static offsets vector filled by the native regexp code. ExternalReference address_of_static_offsets_vector = - ExternalReference::address_of_static_offsets_vector(isolate); + ExternalReference::address_of_static_offsets_vector(isolate()); __ li(a2, Operand(address_of_static_offsets_vector)); // a1: number of capture registers @@ -3110,7 +2992,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ SmiTag(a0); __ MultiPush(kSavedRegs); - CreateAllocationSiteStub create_stub; + CreateAllocationSiteStub create_stub(masm->isolate()); __ CallStub(&create_stub); __ MultiPop(kSavedRegs); @@ -3135,11 +3017,61 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { } +static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { + __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); + + // Do not transform the receiver for strict mode functions. + int32_t strict_mode_function_mask = + 1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize); + // Do not transform the receiver for native (Compilerhints already in a3). + int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize); + __ And(at, t0, Operand(strict_mode_function_mask | native_mask)); + __ Branch(cont, ne, at, Operand(zero_reg)); +} + + +static void EmitSlowCase(MacroAssembler* masm, + int argc, + Label* non_function) { + // Check for function proxy. + __ Branch(non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); + __ push(a1); // put proxy as additional argument + __ li(a0, Operand(argc + 1, RelocInfo::NONE32)); + __ mov(a2, zero_reg); + __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); + { + Handle<Code> adaptor = + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); + __ Jump(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). + __ bind(non_function); + __ sw(a1, MemOperand(sp, argc * kPointerSize)); + __ li(a0, Operand(argc)); // Set up the number of arguments. + __ mov(a2, zero_reg); + __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); +} + + +static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { + // Wrap the receiver and patch it back onto the stack. 
+ { FrameScope frame_scope(masm, StackFrame::INTERNAL); + __ Push(a1, a3); + __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); + __ pop(a1); + } + __ Branch(USE_DELAY_SLOT, cont); + __ sw(v0, MemOperand(sp, argc * kPointerSize)); +} + + void CallFunctionStub::Generate(MacroAssembler* masm) { // a1 : the function to call - // a2 : feedback vector - // a3 : (only if a2 is not the megamorphic symbol) slot in feedback - // vector (Smi) Label slow, non_function, wrap, cont; if (NeedsChecks()) { @@ -3150,34 +3082,20 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // Goto slow case if we do not have a function. __ GetObjectType(a1, t0, t0); __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); - - if (RecordCallTarget()) { - GenerateRecordCallTarget(masm); - // Type information was updated. Because we may call Array, which - // expects either undefined or an AllocationSite in a2 we need - // to set a2 to undefined. - __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); - } } // Fast-case: Invoke the function now. // a1: pushed function - ParameterCount actual(argc_); + int argc = argc_; + ParameterCount actual(argc); if (CallAsMethod()) { if (NeedsChecks()) { - // Do not transform the receiver for strict mode functions and natives. - __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); - __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); - int32_t strict_mode_function_mask = - 1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize); - int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize); - __ And(at, t0, Operand(strict_mode_function_mask | native_mask)); - __ Branch(&cont, ne, at, Operand(zero_reg)); + EmitContinueIfStrictOrNative(masm, &cont); } // Compute the receiver in sloppy mode. - __ lw(a3, MemOperand(sp, argc_ * kPointerSize)); + __ lw(a3, MemOperand(sp, argc * kPointerSize)); if (NeedsChecks()) { __ JumpIfSmi(a3, &wrap); @@ -3189,56 +3107,19 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { __ bind(&cont); } + __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); if (NeedsChecks()) { // Slow-case: Non-function called. __ bind(&slow); - if (RecordCallTarget()) { - // If there is a call target cache, mark it megamorphic in the - // non-function case. MegamorphicSentinel is an immortal immovable - // object (megamorphic symbol) so no write barrier is needed. - ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), - masm->isolate()->heap()->megamorphic_symbol()); - __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize); - __ Addu(t1, a2, Operand(t1)); - __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex); - __ sw(at, FieldMemOperand(t1, FixedArray::kHeaderSize)); - } - // Check for function proxy. - __ Branch(&non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); - __ push(a1); // Put proxy as additional argument. - __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32)); - __ li(a2, Operand(0, RelocInfo::NONE32)); - __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); - { - Handle<Code> adaptor = - masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); - __ Jump(adaptor, RelocInfo::CODE_TARGET); - } - - // CALL_NON_FUNCTION expects the non-function callee as receiver (instead - // of the original receiver from the call site). - __ bind(&non_function); - __ sw(a1, MemOperand(sp, argc_ * kPointerSize)); - __ li(a0, Operand(argc_)); // Set up the number of arguments. 
- __ li(a2, Operand(0, RelocInfo::NONE32)); - __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION); - __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), - RelocInfo::CODE_TARGET); + EmitSlowCase(masm, argc, &non_function); } if (CallAsMethod()) { __ bind(&wrap); // Wrap the receiver and patch it back onto the stack. - { FrameScope frame_scope(masm, StackFrame::INTERNAL); - __ Push(a1, a3); - __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); - __ pop(a1); - } - __ mov(a0, v0); - __ sw(a0, MemOperand(sp, argc_ * kPointerSize)); - __ jmp(&cont); + EmitWrapCase(masm, argc, &cont); } } @@ -3302,11 +3183,115 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ bind(&do_call); // Set expected number of arguments to zero (not changing r0). __ li(a2, Operand(0, RelocInfo::NONE32)); - __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), RelocInfo::CODE_TARGET); } +static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { + __ lw(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); + __ lw(vector, FieldMemOperand(vector, + JSFunction::kSharedFunctionInfoOffset)); + __ lw(vector, FieldMemOperand(vector, + SharedFunctionInfo::kFeedbackVectorOffset)); +} + + +void CallICStub::Generate(MacroAssembler* masm) { + // r1 - function + // r3 - slot id (Smi) + Label extra_checks_or_miss, slow_start; + Label slow, non_function, wrap, cont; + Label have_js_function; + int argc = state_.arg_count(); + ParameterCount actual(argc); + + EmitLoadTypeFeedbackVector(masm, a2); + + // The checks. First, does r1 match the recorded monomorphic target? + __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); + __ Addu(t0, a2, Operand(t0)); + __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); + __ Branch(&extra_checks_or_miss, ne, a1, Operand(t0)); + + __ bind(&have_js_function); + if (state_.CallAsMethod()) { + EmitContinueIfStrictOrNative(masm, &cont); + // Compute the receiver in sloppy mode. + __ lw(a3, MemOperand(sp, argc * kPointerSize)); + + __ JumpIfSmi(a3, &wrap); + __ GetObjectType(a3, t0, t0); + __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); + + __ bind(&cont); + } + + __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); + + __ bind(&slow); + EmitSlowCase(masm, argc, &non_function); + + if (state_.CallAsMethod()) { + __ bind(&wrap); + EmitWrapCase(masm, argc, &cont); + } + + __ bind(&extra_checks_or_miss); + Label miss; + + __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex); + __ Branch(&slow_start, eq, t0, Operand(at)); + __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex); + __ Branch(&miss, eq, t0, Operand(at)); + + if (!FLAG_trace_ic) { + // We are going megamorphic, and we don't want to visit the runtime. + __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); + __ Addu(t0, a2, Operand(t0)); + __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex); + __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); + __ Branch(&slow_start); + } + + // We are here because tracing is on or we are going monomorphic. + __ bind(&miss); + GenerateMiss(masm); + + // the slow case + __ bind(&slow_start); + // Check that the function is really a JavaScript function. + // r1: pushed function (to be verified) + __ JumpIfSmi(a1, &non_function); + + // Goto slow case if we do not have a function. 
+ __ GetObjectType(a1, t0, t0); + __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); + __ Branch(&have_js_function); +} + + +void CallICStub::GenerateMiss(MacroAssembler* masm) { + // Get the receiver of the function from the stack; 1 ~ return address. + __ lw(t0, MemOperand(sp, (state_.arg_count() + 1) * kPointerSize)); + + { + FrameScope scope(masm, StackFrame::INTERNAL); + + // Push the receiver and the function and feedback info. + __ Push(t0, a1, a2, a3); + + // Call the entry. + ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), + masm->isolate()); + __ CallExternalReference(miss, 4); + + // Move result to a1 and exit the internal frame. + __ mov(a1, v0); + } +} + + // StringCharCodeAtGenerator. void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { Label flat_string; @@ -3532,9 +3517,15 @@ void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm, { Label loop; __ bind(&loop); - __ lwr(scratch1, MemOperand(src)); - __ Addu(src, src, Operand(kReadAlignment)); - __ lwl(scratch1, MemOperand(src, -1)); + if (kArchEndian == kBig) { + __ lwl(scratch1, MemOperand(src)); + __ Addu(src, src, Operand(kReadAlignment)); + __ lwr(scratch1, MemOperand(src, -1)); + } else { + __ lwr(scratch1, MemOperand(src)); + __ Addu(src, src, Operand(kReadAlignment)); + __ lwl(scratch1, MemOperand(src, -1)); + } __ sw(scratch1, MemOperand(dest)); __ Addu(dest, dest, Operand(kReadAlignment)); __ Subu(scratch2, limit, dest); @@ -3824,7 +3815,7 @@ void SubStringStub::Generate(MacroAssembler* masm) { masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED); __ bind(&return_v0); - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); __ IncrementCounter(counters->sub_string_native(), 1, a3, t0); __ DropAndRet(3); @@ -3968,7 +3959,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop( void StringCompareStub::Generate(MacroAssembler* masm) { Label runtime; - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); // Stack frame on entry. // sp[0]: right string @@ -3999,218 +3990,17 @@ void StringCompareStub::Generate(MacroAssembler* masm) { } -void ArrayPushStub::Generate(MacroAssembler* masm) { - Register receiver = a0; - Register scratch = a1; - - int argc = arguments_count(); - - if (argc == 0) { - // Nothing to do, just return the length. - __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); - __ DropAndRet(argc + 1); - return; - } - - Isolate* isolate = masm->isolate(); - - if (argc != 1) { - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - Label call_builtin, attempt_to_grow_elements, with_write_barrier; - - Register elements = t2; - Register end_elements = t1; - // Get the elements array of the object. - __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); - - if (IsFastSmiOrObjectElementsKind(elements_kind())) { - // Check that the elements are in fast mode and writable. - __ CheckMap(elements, - scratch, - Heap::kFixedArrayMapRootIndex, - &call_builtin, - DONT_DO_SMI_CHECK); - } - - // Get the array's length into scratch and calculate new length. - __ lw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); - __ Addu(scratch, scratch, Operand(Smi::FromInt(argc))); - - // Get the elements' length. 
- __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); - - const int kEndElementsOffset = - FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; - - if (IsFastSmiOrObjectElementsKind(elements_kind())) { - // Check if we could survive without allocation. - __ Branch(&attempt_to_grow_elements, gt, scratch, Operand(t0)); - - // Check if value is a smi. - __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize)); - __ JumpIfNotSmi(t0, &with_write_barrier); - - // Store the value. - // We may need a register containing the address end_elements below, - // so write back the value in end_elements. - __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize); - __ Addu(end_elements, elements, end_elements); - __ Addu(end_elements, end_elements, kEndElementsOffset); - __ sw(t0, MemOperand(end_elements)); - } else { - // Check if we could survive without allocation. - __ Branch(&call_builtin, gt, scratch, Operand(t0)); - - __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize)); - __ StoreNumberToDoubleElements(t0, scratch, elements, a3, t1, a2, - &call_builtin, argc * kDoubleSize); - } - - // Save new length. - __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); - __ mov(v0, scratch); - __ DropAndRet(argc + 1); - - if (IsFastDoubleElementsKind(elements_kind())) { - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - __ bind(&with_write_barrier); - - if (IsFastSmiElementsKind(elements_kind())) { - if (FLAG_trace_elements_transitions) __ jmp(&call_builtin); - - __ lw(t3, FieldMemOperand(t0, HeapObject::kMapOffset)); - __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); - __ Branch(&call_builtin, eq, t3, Operand(at)); - - ElementsKind target_kind = IsHoleyElementsKind(elements_kind()) - ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; - __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); - __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset)); - __ lw(a3, ContextOperand(a3, Context::JS_ARRAY_MAPS_INDEX)); - const int header_size = FixedArrayBase::kHeaderSize; - // Verify that the object can be transitioned in place. - const int origin_offset = header_size + elements_kind() * kPointerSize; - __ lw(a2, FieldMemOperand(receiver, origin_offset)); - __ lw(at, FieldMemOperand(a3, HeapObject::kMapOffset)); - __ Branch(&call_builtin, ne, a2, Operand(at)); - - - const int target_offset = header_size + target_kind * kPointerSize; - __ lw(a3, FieldMemOperand(a3, target_offset)); - __ mov(a2, receiver); - ElementsTransitionGenerator::GenerateMapChangeElementsTransition( - masm, DONT_TRACK_ALLOCATION_SITE, NULL); - } - - // Save new length. - __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); - - // Store the value. - // We may need a register containing the address end_elements below, so write - // back the value in end_elements. - __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize); - __ Addu(end_elements, elements, end_elements); - __ Addu(end_elements, end_elements, kEndElementsOffset); - __ sw(t0, MemOperand(end_elements)); - - __ RecordWrite(elements, - end_elements, - t0, - kRAHasNotBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - __ mov(v0, scratch); - __ DropAndRet(argc + 1); - - __ bind(&attempt_to_grow_elements); - // scratch: array's length + 1. 
- - if (!FLAG_inline_new) { - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize)); - // Growing elements that are SMI-only requires special handling in case the - // new element is non-Smi. For now, delegate to the builtin. - if (IsFastSmiElementsKind(elements_kind())) { - __ JumpIfNotSmi(a2, &call_builtin); - } - - // We could be lucky and the elements array could be at the top of new-space. - // In this case we can just grow it in place by moving the allocation pointer - // up. - ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(isolate); - ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(isolate); - - const int kAllocationDelta = 4; - ASSERT(kAllocationDelta >= argc); - // Load top and check if it is the end of elements. - __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize); - __ Addu(end_elements, elements, end_elements); - __ Addu(end_elements, end_elements, Operand(kEndElementsOffset)); - __ li(t0, Operand(new_space_allocation_top)); - __ lw(a3, MemOperand(t0)); - __ Branch(&call_builtin, ne, a3, Operand(end_elements)); - - __ li(t3, Operand(new_space_allocation_limit)); - __ lw(t3, MemOperand(t3)); - __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize)); - __ Branch(&call_builtin, hi, a3, Operand(t3)); - - // We fit and could grow elements. - // Update new_space_allocation_top. - __ sw(a3, MemOperand(t0)); - // Push the argument. - __ sw(a2, MemOperand(end_elements)); - // Fill the rest with holes. - __ LoadRoot(a3, Heap::kTheHoleValueRootIndex); - for (int i = 1; i < kAllocationDelta; i++) { - __ sw(a3, MemOperand(end_elements, i * kPointerSize)); - } - - // Update elements' and array's sizes. - __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); - __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); - __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta))); - __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); - - // Elements are in new space, so write barrier is not required. - __ mov(v0, scratch); - __ DropAndRet(argc + 1); - - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); -} - - void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- a1 : left // -- a0 : right // -- ra : return address // ----------------------------------- - Isolate* isolate = masm->isolate(); // Load a2 with the allocation site. We stick an undefined dummy value here // and replace it with the real allocation site later when we instantiate this // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). - __ li(a2, handle(isolate->heap()->undefined_value())); + __ li(a2, handle(isolate()->heap()->undefined_value())); // Make sure that we actually patched the allocation site. if (FLAG_debug_code) { @@ -4223,7 +4013,7 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { // Tail call into the stub that handles binary operations with allocation // sites. 
- BinaryOpWithAllocationSiteStub stub(state_); + BinaryOpWithAllocationSiteStub stub(isolate(), state_); __ TailCallStub(&stub); } @@ -4319,9 +4109,9 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { __ bind(&unordered); __ bind(&generic_stub); - ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, + ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC, CompareIC::GENERIC); - __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); + __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); __ bind(&maybe_undefined1); if (Token::IsOrderedRelationalCompareOp(op_)) { @@ -4555,7 +4345,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) { { // Call the runtime system in a fresh internal frame. ExternalReference miss = - ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); + ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate()); FrameScope scope(masm, StackFrame::INTERNAL); __ Push(a1, a0); __ Push(ra, a1, a0); @@ -4600,7 +4390,7 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) { void DirectCEntryStub::GenerateCall(MacroAssembler* masm, Register target) { intptr_t loc = - reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location()); + reinterpret_cast<intptr_t>(GetCode().location()); __ Move(t9, target); __ li(ra, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE); __ Call(ra); @@ -4675,7 +4465,7 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, __ MultiPush(spill_mask); __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); __ li(a1, Operand(Handle<Name>(name))); - NameDictionaryLookupStub stub(NEGATIVE_LOOKUP); + NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP); __ CallStub(&stub); __ mov(at, v0); __ MultiPop(spill_mask); @@ -4754,7 +4544,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, __ Move(a0, elements); __ Move(a1, name); } - NameDictionaryLookupStub stub(POSITIVE_LOOKUP); + NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP); __ CallStub(&stub); __ mov(scratch2, a2); __ mov(at, v0); @@ -4862,11 +4652,11 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( Isolate* isolate) { - StoreBufferOverflowStub stub1(kDontSaveFPRegs); - stub1.GetCode(isolate); + StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs); + stub1.GetCode(); // Hydrogen code stubs need stub2 at snapshot time. - StoreBufferOverflowStub stub2(kSaveFPRegs); - stub2.GetCode(isolate); + StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); + stub2.GetCode(); } @@ -4968,12 +4758,11 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { __ Move(address, regs_.address()); __ Move(a0, regs_.object()); __ Move(a1, address); - __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate()))); + __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); AllowExternalCallThatCantCauseGC scope(masm); __ CallCFunction( - ExternalReference::incremental_marking_record_write_function( - masm->isolate()), + ExternalReference::incremental_marking_record_write_function(isolate()), argument_count); regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); } @@ -5133,8 +4922,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { - CEntryStub ces(1, fp_registers_ ? 
kSaveFPRegs : kDontSaveFPRegs); - __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); + CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); + __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); int parameter_count_offset = StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; __ lw(a1, MemOperand(fp, parameter_count_offset)); @@ -5150,7 +4939,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { if (masm->isolate()->function_entry_hook() != NULL) { - ProfileEntryHookStub stub; + ProfileEntryHookStub stub(masm->isolate()); __ push(ra); __ CallStub(&stub); __ pop(ra); @@ -5192,18 +4981,18 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) { __ Subu(sp, sp, kCArgsSlotsSize); #if defined(V8_HOST_ARCH_MIPS) int32_t entry_hook = - reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook()); + reinterpret_cast<int32_t>(isolate()->function_entry_hook()); __ li(t9, Operand(entry_hook)); #else // Under the simulator we need to indirect the entry hook through a // trampoline function at a known address. // It additionally takes an isolate as a third parameter. - __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate()))); + __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline)); __ li(t9, Operand(ExternalReference(&dispatcher, ExternalReference::BUILTIN_CALL, - masm->isolate()))); + isolate()))); #endif // Call C function through t9 to conform ABI for PIC. __ Call(t9); @@ -5225,14 +5014,14 @@ template<class T> static void CreateArrayDispatch(MacroAssembler* masm, AllocationSiteOverrideMode mode) { if (mode == DISABLE_ALLOCATION_SITES) { - T stub(GetInitialFastElementsKind(), mode); + T stub(masm->isolate(), GetInitialFastElementsKind(), mode); __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { int last_index = GetSequenceIndexFromFastElementsKind( TERMINAL_FAST_ELEMENTS_KIND); for (int i = 0; i <= last_index; ++i) { ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); - T stub(kind); + T stub(masm->isolate(), kind); __ TailCallStub(&stub, eq, a3, Operand(kind)); } @@ -5273,12 +5062,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, ElementsKind initial = GetInitialFastElementsKind(); ElementsKind holey_initial = GetHoleyElementsKind(initial); - ArraySingleArgumentConstructorStub stub_holey(holey_initial, + ArraySingleArgumentConstructorStub stub_holey(masm->isolate(), + holey_initial, DISABLE_ALLOCATION_SITES); __ TailCallStub(&stub_holey); __ bind(&normal_sequence); - ArraySingleArgumentConstructorStub stub(initial, + ArraySingleArgumentConstructorStub stub(masm->isolate(), + initial, DISABLE_ALLOCATION_SITES); __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { @@ -5306,7 +5097,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, TERMINAL_FAST_ELEMENTS_KIND); for (int i = 0; i <= last_index; ++i) { ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); - ArraySingleArgumentConstructorStub stub(kind); + ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); __ TailCallStub(&stub, eq, a3, Operand(kind)); } @@ -5324,11 +5115,11 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { TERMINAL_FAST_ELEMENTS_KIND); for (int i = 0; i <= to_index; ++i) { ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); - T stub(kind); - stub.GetCode(isolate); + T stub(isolate, kind); + 
stub.GetCode(); if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { - T stub1(kind, DISABLE_ALLOCATION_SITES); - stub1.GetCode(isolate); + T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); + stub1.GetCode(); } } } @@ -5349,12 +5140,12 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; for (int i = 0; i < 2; i++) { // For internal arrays we only need a few things. - InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); - stubh1.GetCode(isolate); - InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); - stubh2.GetCode(isolate); - InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); - stubh3.GetCode(isolate); + InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); + stubh1.GetCode(); + InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); + stubh2.GetCode(); + InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]); + stubh3.GetCode(); } } @@ -5432,10 +5223,10 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { void InternalArrayConstructorStub::GenerateCase( MacroAssembler* masm, ElementsKind kind) { - InternalArrayNoArgumentConstructorStub stub0(kind); + InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); __ TailCallStub(&stub0, lo, a0, Operand(1)); - InternalArrayNArgumentsConstructorStub stubN(kind); + InternalArrayNArgumentsConstructorStub stubN(isolate(), kind); __ TailCallStub(&stubN, hi, a0, Operand(1)); if (IsFastPackedElementsKind(kind)) { @@ -5444,11 +5235,11 @@ void InternalArrayConstructorStub::GenerateCase( __ lw(at, MemOperand(sp, 0)); InternalArraySingleArgumentConstructorStub - stub1_holey(GetHoleyElementsKind(kind)); + stub1_holey(isolate(), GetHoleyElementsKind(kind)); __ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg)); } - InternalArraySingleArgumentConstructorStub stub1(kind); + InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); __ TailCallStub(&stub1); } @@ -5538,8 +5329,6 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { STATIC_ASSERT(FCA::kHolderIndex == 0); STATIC_ASSERT(FCA::kArgsLength == 7); - Isolate* isolate = masm->isolate(); - // Save context, callee and call data. __ Push(context, callee, call_data); // Load context from callee. @@ -5552,7 +5341,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { // Push return value and default return value. __ Push(scratch, scratch); __ li(scratch, - Operand(ExternalReference::isolate_address(isolate))); + Operand(ExternalReference::isolate_address(isolate()))); // Push isolate and holder. 
__ Push(scratch, holder); @@ -5582,11 +5371,8 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { __ sw(zero_reg, MemOperand(a0, 3 * kPointerSize)); const int kStackUnwindSpace = argc + FCA::kArgsLength + 1; - Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); - ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL; - ApiFunction thunk_fun(thunk_address); - ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, - masm->isolate()); + ExternalReference thunk_ref = + ExternalReference::invoke_function_callback(isolate()); AllowExternalCallThatCantCauseGC scope(masm); MemOperand context_restore_operand( @@ -5632,12 +5418,8 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) { const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1; - Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); - ExternalReference::Type thunk_type = - ExternalReference::PROFILING_GETTER_CALL; - ApiFunction thunk_fun(thunk_address); - ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, - masm->isolate()); + ExternalReference thunk_ref = + ExternalReference::invoke_accessor_getter_callback(isolate()); __ CallApiFunctionAndReturn(api_function_address, thunk_ref, kStackUnwindSpace, diff --git a/deps/v8/src/mips/code-stubs-mips.h b/deps/v8/src/mips/code-stubs-mips.h index e71c30583..64577f903 100644 --- a/deps/v8/src/mips/code-stubs-mips.h +++ b/deps/v8/src/mips/code-stubs-mips.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
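The pattern running through the code-stubs-mips.cc hunks above, and through the header changes that follow, is that every PlatformCodeStub now receives its Isolate* at construction time, so GetCode() and the generators can call isolate() instead of masm->isolate() and no longer take an Isolate argument. A minimal sketch of that constructor-injection shape is shown below; the class and member names are illustrative only, not V8's actual hierarchy.

    // Sketch only: constructor-injected Isolate, as in this roll's stub changes.
    // Names below are hypothetical, not V8's real classes.
    #include <cassert>

    class Isolate;  // opaque per-VM state

    class PlatformCodeStubSketch {
     public:
      explicit PlatformCodeStubSketch(Isolate* isolate) : isolate_(isolate) {}

      // GetCode() no longer needs an Isolate* parameter: the stub already
      // carries it, so call sites shrink from stub.GetCode(isolate) to
      // stub.GetCode().
      void* GetCode() {
        assert(isolate_ != nullptr);
        return nullptr;  // placeholder for the generated Code object
      }

     protected:
      Isolate* isolate() const { return isolate_; }

     private:
      Isolate* isolate_;
    };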
#ifndef V8_MIPS_CODE_STUBS_ARM_H_ #define V8_MIPS_CODE_STUBS_ARM_H_ @@ -40,8 +17,8 @@ void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code); class StoreBufferOverflowStub: public PlatformCodeStub { public: - explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp) - : save_doubles_(save_fp) {} + StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp) + : PlatformCodeStub(isolate), save_doubles_(save_fp) {} void Generate(MacroAssembler* masm); @@ -93,7 +70,7 @@ class StringHelper : public AllStatic { class SubStringStub: public PlatformCodeStub { public: - SubStringStub() {} + explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {} private: Major MajorKey() { return SubString; } @@ -104,8 +81,8 @@ class SubStringStub: public PlatformCodeStub { class StoreRegistersStateStub: public PlatformCodeStub { public: - explicit StoreRegistersStateStub(SaveFPRegsMode with_fp) - : save_doubles_(with_fp) {} + explicit StoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp) + : PlatformCodeStub(isolate), save_doubles_(with_fp) {} static void GenerateAheadOfTime(Isolate* isolate); private: @@ -118,8 +95,8 @@ class StoreRegistersStateStub: public PlatformCodeStub { class RestoreRegistersStateStub: public PlatformCodeStub { public: - explicit RestoreRegistersStateStub(SaveFPRegsMode with_fp) - : save_doubles_(with_fp) {} + explicit RestoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp) + : PlatformCodeStub(isolate), save_doubles_(with_fp) {} static void GenerateAheadOfTime(Isolate* isolate); private: @@ -132,7 +109,7 @@ class RestoreRegistersStateStub: public PlatformCodeStub { class StringCompareStub: public PlatformCodeStub { public: - StringCompareStub() { } + explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { } // Compare two flat ASCII strings and returns result in v0. static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm, @@ -173,11 +150,13 @@ class StringCompareStub: public PlatformCodeStub { // so you don't have to set up the frame. 
class WriteInt32ToHeapNumberStub : public PlatformCodeStub { public: - WriteInt32ToHeapNumberStub(Register the_int, + WriteInt32ToHeapNumberStub(Isolate* isolate, + Register the_int, Register the_heap_number, Register scratch, Register scratch2) - : the_int_(the_int), + : PlatformCodeStub(isolate), + the_int_(the_int), the_heap_number_(the_heap_number), scratch_(scratch), sign_(scratch2) { @@ -216,12 +195,14 @@ class WriteInt32ToHeapNumberStub : public PlatformCodeStub { class RecordWriteStub: public PlatformCodeStub { public: - RecordWriteStub(Register object, + RecordWriteStub(Isolate* isolate, + Register object, Register value, Register address, RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) - : object_(object), + : PlatformCodeStub(isolate), + object_(object), value_(value), address_(address), remembered_set_action_(remembered_set_action), @@ -406,7 +387,7 @@ class RecordWriteStub: public PlatformCodeStub { // moved by GC class DirectCEntryStub: public PlatformCodeStub { public: - DirectCEntryStub() {} + explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {} void Generate(MacroAssembler* masm); void GenerateCall(MacroAssembler* masm, Register target); @@ -422,7 +403,8 @@ class NameDictionaryLookupStub: public PlatformCodeStub { public: enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP }; - explicit NameDictionaryLookupStub(LookupMode mode) : mode_(mode) { } + NameDictionaryLookupStub(Isolate* isolate, LookupMode mode) + : PlatformCodeStub(isolate), mode_(mode) { } void Generate(MacroAssembler* masm); diff --git a/deps/v8/src/mips/codegen-mips.cc b/deps/v8/src/mips/codegen-mips.cc index 1b79433d3..adf6d37d8 100644 --- a/deps/v8/src/mips/codegen-mips.cc +++ b/deps/v8/src/mips/codegen-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
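A second theme of this roll, visible in the DoubleToIStub and ConvertToDoubleStub hunks above and in the codegen-mips.cc memcpy hunks below, is replacing hard-coded word offsets and BIG_ENDIAN_FLOATING_POINT #ifdefs with endian-parameterized constants (Register::kMantissaOffset / kExponentOffset, kArchEndian == kLittle). The short standalone sketch below only illustrates why those offsets swap; the offset values of 0 and 4 are the usual IEEE-754 layout assumption, not taken from the diff.

    // Sketch only: the two 32-bit halves of a double sit at different offsets
    // depending on endianness, which is what kMantissaOffset/kExponentOffset
    // abstract away in the stubs above.
    #include <cstdint>
    #include <cstring>
    #include <iostream>

    int main() {
      double d = 1.0;  // IEEE-754: 0x3FF00000'00000000, exponent in the high word
      uint32_t words[2];
      std::memcpy(words, &d, sizeof(d));

      // Little-endian stores the all-zero mantissa word first for 1.0.
      const bool little_endian = (words[0] == 0u);
      const int mantissa_offset = little_endian ? 0 : 4;
      const int exponent_offset = little_endian ? 4 : 0;

      std::cout << "mantissa offset: " << mantissa_offset
                << ", exponent offset: " << exponent_offset << "\n";
    }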
#include "v8.h" @@ -67,21 +44,13 @@ UnaryMathFunction CreateExpFunction() { Register temp2 = t1; Register temp3 = t2; - if (!IsMipsSoftFloatABI) { - // Input value is in f12 anyway, nothing to do. - } else { - __ Move(input, a0, a1); - } + __ MovFromFloatParameter(input); __ Push(temp3, temp2, temp1); MathExpGenerator::EmitMathExp( &masm, input, result, double_scratch1, double_scratch2, temp1, temp2, temp3); __ Pop(temp3, temp2, temp1); - if (!IsMipsSoftFloatABI) { - // Result is already in f0, nothing to do. - } else { - __ Move(v0, v1, result); - } + __ MovToFloatResult(result); __ Ret(); } @@ -103,14 +72,10 @@ UnaryMathFunction CreateExpFunction() { #if defined(V8_HOST_ARCH_MIPS) OS::MemCopyUint8Function CreateMemCopyUint8Function( - OS::MemCopyUint8Function stub) { + OS::MemCopyUint8Function stub) { #if defined(USE_SIMULATOR) return stub; #else - if (Serializer::enabled()) { - return stub; - } - size_t actual_size; byte* buffer = static_cast<byte*>(OS::Allocate(3 * KB, &actual_size, true)); if (buffer == NULL) return stub; @@ -167,11 +132,17 @@ OS::MemCopyUint8Function CreateMemCopyUint8Function( __ beq(a3, zero_reg, &aligned); // Already aligned. __ subu(a2, a2, a3); // In delay slot. a2 is the remining bytes count. - __ lwr(t8, MemOperand(a1)); - __ addu(a1, a1, a3); - __ swr(t8, MemOperand(a0)); - __ addu(a0, a0, a3); - + if (kArchEndian == kLittle) { + __ lwr(t8, MemOperand(a1)); + __ addu(a1, a1, a3); + __ swr(t8, MemOperand(a0)); + __ addu(a0, a0, a3); + } else { + __ lwl(t8, MemOperand(a1)); + __ addu(a1, a1, a3); + __ swl(t8, MemOperand(a0)); + __ addu(a0, a0, a3); + } // Now dst/src are both aligned to (word) aligned addresses. Set a2 to // count how many bytes we have to copy after all the 64 byte chunks are // copied and a3 to the dst pointer after all the 64 byte chunks have been @@ -323,12 +294,21 @@ OS::MemCopyUint8Function CreateMemCopyUint8Function( __ beq(a3, zero_reg, &ua_chk16w); __ subu(a2, a2, a3); // In delay slot. - __ lwr(v1, MemOperand(a1)); - __ lwl(v1, - MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); - __ addu(a1, a1, a3); - __ swr(v1, MemOperand(a0)); - __ addu(a0, a0, a3); + if (kArchEndian == kLittle) { + __ lwr(v1, MemOperand(a1)); + __ lwl(v1, + MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); + __ addu(a1, a1, a3); + __ swr(v1, MemOperand(a0)); + __ addu(a0, a0, a3); + } else { + __ lwl(v1, MemOperand(a1)); + __ lwr(v1, + MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); + __ addu(a1, a1, a3); + __ swl(v1, MemOperand(a0)); + __ addu(a0, a0, a3); + } // Now the dst (but not the source) is aligned. 
Set a2 to count how many // bytes we have to copy after all the 64 byte chunks are copied and a3 to @@ -357,40 +337,77 @@ OS::MemCopyUint8Function CreateMemCopyUint8Function( __ bind(&ua_loop16w); __ Pref(pref_hint_load, MemOperand(a1, 3 * pref_chunk)); - __ lwr(t0, MemOperand(a1)); - __ lwr(t1, MemOperand(a1, 1, loadstore_chunk)); - __ lwr(t2, MemOperand(a1, 2, loadstore_chunk)); - - if (pref_hint_store == kPrefHintPrepareForStore) { - __ sltu(v1, t9, a0); - __ Branch(USE_DELAY_SLOT, &ua_skip_pref, gt, v1, Operand(zero_reg)); + if (kArchEndian == kLittle) { + __ lwr(t0, MemOperand(a1)); + __ lwr(t1, MemOperand(a1, 1, loadstore_chunk)); + __ lwr(t2, MemOperand(a1, 2, loadstore_chunk)); + + if (pref_hint_store == kPrefHintPrepareForStore) { + __ sltu(v1, t9, a0); + __ Branch(USE_DELAY_SLOT, &ua_skip_pref, gt, v1, Operand(zero_reg)); + } + __ lwr(t3, MemOperand(a1, 3, loadstore_chunk)); // Maybe in delay slot. + + __ Pref(pref_hint_store, MemOperand(a0, 4 * pref_chunk)); + __ Pref(pref_hint_store, MemOperand(a0, 5 * pref_chunk)); + + __ bind(&ua_skip_pref); + __ lwr(t4, MemOperand(a1, 4, loadstore_chunk)); + __ lwr(t5, MemOperand(a1, 5, loadstore_chunk)); + __ lwr(t6, MemOperand(a1, 6, loadstore_chunk)); + __ lwr(t7, MemOperand(a1, 7, loadstore_chunk)); + __ lwl(t0, + MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t1, + MemOperand(a1, 2, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t2, + MemOperand(a1, 3, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t3, + MemOperand(a1, 4, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t4, + MemOperand(a1, 5, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t5, + MemOperand(a1, 6, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t6, + MemOperand(a1, 7, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t7, + MemOperand(a1, 8, loadstore_chunk, MemOperand::offset_minus_one)); + } else { + __ lwl(t0, MemOperand(a1)); + __ lwl(t1, MemOperand(a1, 1, loadstore_chunk)); + __ lwl(t2, MemOperand(a1, 2, loadstore_chunk)); + + if (pref_hint_store == kPrefHintPrepareForStore) { + __ sltu(v1, t9, a0); + __ Branch(USE_DELAY_SLOT, &ua_skip_pref, gt, v1, Operand(zero_reg)); + } + __ lwl(t3, MemOperand(a1, 3, loadstore_chunk)); // Maybe in delay slot. + + __ Pref(pref_hint_store, MemOperand(a0, 4 * pref_chunk)); + __ Pref(pref_hint_store, MemOperand(a0, 5 * pref_chunk)); + + __ bind(&ua_skip_pref); + __ lwl(t4, MemOperand(a1, 4, loadstore_chunk)); + __ lwl(t5, MemOperand(a1, 5, loadstore_chunk)); + __ lwl(t6, MemOperand(a1, 6, loadstore_chunk)); + __ lwl(t7, MemOperand(a1, 7, loadstore_chunk)); + __ lwr(t0, + MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t1, + MemOperand(a1, 2, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t2, + MemOperand(a1, 3, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t3, + MemOperand(a1, 4, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t4, + MemOperand(a1, 5, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t5, + MemOperand(a1, 6, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t6, + MemOperand(a1, 7, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t7, + MemOperand(a1, 8, loadstore_chunk, MemOperand::offset_minus_one)); } - __ lwr(t3, MemOperand(a1, 3, loadstore_chunk)); // Maybe in delay slot. 
- - __ Pref(pref_hint_store, MemOperand(a0, 4 * pref_chunk)); - __ Pref(pref_hint_store, MemOperand(a0, 5 * pref_chunk)); - - __ bind(&ua_skip_pref); - __ lwr(t4, MemOperand(a1, 4, loadstore_chunk)); - __ lwr(t5, MemOperand(a1, 5, loadstore_chunk)); - __ lwr(t6, MemOperand(a1, 6, loadstore_chunk)); - __ lwr(t7, MemOperand(a1, 7, loadstore_chunk)); - __ lwl(t0, - MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t1, - MemOperand(a1, 2, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t2, - MemOperand(a1, 3, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t3, - MemOperand(a1, 4, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t4, - MemOperand(a1, 5, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t5, - MemOperand(a1, 6, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t6, - MemOperand(a1, 7, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t7, - MemOperand(a1, 8, loadstore_chunk, MemOperand::offset_minus_one)); __ Pref(pref_hint_load, MemOperand(a1, 4 * pref_chunk)); __ sw(t0, MemOperand(a0)); __ sw(t1, MemOperand(a0, 1, loadstore_chunk)); @@ -400,30 +417,57 @@ OS::MemCopyUint8Function CreateMemCopyUint8Function( __ sw(t5, MemOperand(a0, 5, loadstore_chunk)); __ sw(t6, MemOperand(a0, 6, loadstore_chunk)); __ sw(t7, MemOperand(a0, 7, loadstore_chunk)); - __ lwr(t0, MemOperand(a1, 8, loadstore_chunk)); - __ lwr(t1, MemOperand(a1, 9, loadstore_chunk)); - __ lwr(t2, MemOperand(a1, 10, loadstore_chunk)); - __ lwr(t3, MemOperand(a1, 11, loadstore_chunk)); - __ lwr(t4, MemOperand(a1, 12, loadstore_chunk)); - __ lwr(t5, MemOperand(a1, 13, loadstore_chunk)); - __ lwr(t6, MemOperand(a1, 14, loadstore_chunk)); - __ lwr(t7, MemOperand(a1, 15, loadstore_chunk)); - __ lwl(t0, - MemOperand(a1, 9, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t1, - MemOperand(a1, 10, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t2, - MemOperand(a1, 11, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t3, - MemOperand(a1, 12, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t4, - MemOperand(a1, 13, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t5, - MemOperand(a1, 14, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t6, - MemOperand(a1, 15, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t7, - MemOperand(a1, 16, loadstore_chunk, MemOperand::offset_minus_one)); + if (kArchEndian == kLittle) { + __ lwr(t0, MemOperand(a1, 8, loadstore_chunk)); + __ lwr(t1, MemOperand(a1, 9, loadstore_chunk)); + __ lwr(t2, MemOperand(a1, 10, loadstore_chunk)); + __ lwr(t3, MemOperand(a1, 11, loadstore_chunk)); + __ lwr(t4, MemOperand(a1, 12, loadstore_chunk)); + __ lwr(t5, MemOperand(a1, 13, loadstore_chunk)); + __ lwr(t6, MemOperand(a1, 14, loadstore_chunk)); + __ lwr(t7, MemOperand(a1, 15, loadstore_chunk)); + __ lwl(t0, + MemOperand(a1, 9, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t1, + MemOperand(a1, 10, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t2, + MemOperand(a1, 11, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t3, + MemOperand(a1, 12, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t4, + MemOperand(a1, 13, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t5, + MemOperand(a1, 14, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t6, + MemOperand(a1, 15, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t7, + MemOperand(a1, 16, loadstore_chunk, MemOperand::offset_minus_one)); + } else { + __ lwl(t0, 
MemOperand(a1, 8, loadstore_chunk)); + __ lwl(t1, MemOperand(a1, 9, loadstore_chunk)); + __ lwl(t2, MemOperand(a1, 10, loadstore_chunk)); + __ lwl(t3, MemOperand(a1, 11, loadstore_chunk)); + __ lwl(t4, MemOperand(a1, 12, loadstore_chunk)); + __ lwl(t5, MemOperand(a1, 13, loadstore_chunk)); + __ lwl(t6, MemOperand(a1, 14, loadstore_chunk)); + __ lwl(t7, MemOperand(a1, 15, loadstore_chunk)); + __ lwr(t0, + MemOperand(a1, 9, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t1, + MemOperand(a1, 10, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t2, + MemOperand(a1, 11, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t3, + MemOperand(a1, 12, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t4, + MemOperand(a1, 13, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t5, + MemOperand(a1, 14, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t6, + MemOperand(a1, 15, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t7, + MemOperand(a1, 16, loadstore_chunk, MemOperand::offset_minus_one)); + } __ Pref(pref_hint_load, MemOperand(a1, 5 * pref_chunk)); __ sw(t0, MemOperand(a0, 8, loadstore_chunk)); __ sw(t1, MemOperand(a0, 9, loadstore_chunk)); @@ -447,30 +491,57 @@ OS::MemCopyUint8Function CreateMemCopyUint8Function( __ beq(a2, t8, &ua_chk1w); __ nop(); // In delay slot. - __ lwr(t0, MemOperand(a1)); - __ lwr(t1, MemOperand(a1, 1, loadstore_chunk)); - __ lwr(t2, MemOperand(a1, 2, loadstore_chunk)); - __ lwr(t3, MemOperand(a1, 3, loadstore_chunk)); - __ lwr(t4, MemOperand(a1, 4, loadstore_chunk)); - __ lwr(t5, MemOperand(a1, 5, loadstore_chunk)); - __ lwr(t6, MemOperand(a1, 6, loadstore_chunk)); - __ lwr(t7, MemOperand(a1, 7, loadstore_chunk)); - __ lwl(t0, - MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t1, - MemOperand(a1, 2, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t2, - MemOperand(a1, 3, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t3, - MemOperand(a1, 4, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t4, - MemOperand(a1, 5, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t5, - MemOperand(a1, 6, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t6, - MemOperand(a1, 7, loadstore_chunk, MemOperand::offset_minus_one)); - __ lwl(t7, - MemOperand(a1, 8, loadstore_chunk, MemOperand::offset_minus_one)); + if (kArchEndian == kLittle) { + __ lwr(t0, MemOperand(a1)); + __ lwr(t1, MemOperand(a1, 1, loadstore_chunk)); + __ lwr(t2, MemOperand(a1, 2, loadstore_chunk)); + __ lwr(t3, MemOperand(a1, 3, loadstore_chunk)); + __ lwr(t4, MemOperand(a1, 4, loadstore_chunk)); + __ lwr(t5, MemOperand(a1, 5, loadstore_chunk)); + __ lwr(t6, MemOperand(a1, 6, loadstore_chunk)); + __ lwr(t7, MemOperand(a1, 7, loadstore_chunk)); + __ lwl(t0, + MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t1, + MemOperand(a1, 2, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t2, + MemOperand(a1, 3, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t3, + MemOperand(a1, 4, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t4, + MemOperand(a1, 5, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t5, + MemOperand(a1, 6, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t6, + MemOperand(a1, 7, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwl(t7, + MemOperand(a1, 8, loadstore_chunk, MemOperand::offset_minus_one)); + } else { + __ lwl(t0, MemOperand(a1)); + __ lwl(t1, MemOperand(a1, 1, loadstore_chunk)); + __ lwl(t2, 
MemOperand(a1, 2, loadstore_chunk)); + __ lwl(t3, MemOperand(a1, 3, loadstore_chunk)); + __ lwl(t4, MemOperand(a1, 4, loadstore_chunk)); + __ lwl(t5, MemOperand(a1, 5, loadstore_chunk)); + __ lwl(t6, MemOperand(a1, 6, loadstore_chunk)); + __ lwl(t7, MemOperand(a1, 7, loadstore_chunk)); + __ lwr(t0, + MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t1, + MemOperand(a1, 2, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t2, + MemOperand(a1, 3, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t3, + MemOperand(a1, 4, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t4, + MemOperand(a1, 5, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t5, + MemOperand(a1, 6, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t6, + MemOperand(a1, 7, loadstore_chunk, MemOperand::offset_minus_one)); + __ lwr(t7, + MemOperand(a1, 8, loadstore_chunk, MemOperand::offset_minus_one)); + } __ addiu(a1, a1, 8 * loadstore_chunk); __ sw(t0, MemOperand(a0)); __ sw(t1, MemOperand(a0, 1, loadstore_chunk)); @@ -491,9 +562,15 @@ OS::MemCopyUint8Function CreateMemCopyUint8Function( __ addu(a3, a0, a3); __ bind(&ua_wordCopy_loop); - __ lwr(v1, MemOperand(a1)); - __ lwl(v1, - MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); + if (kArchEndian == kLittle) { + __ lwr(v1, MemOperand(a1)); + __ lwl(v1, + MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); + } else { + __ lwl(v1, MemOperand(a1)); + __ lwr(v1, + MemOperand(a1, 1, loadstore_chunk, MemOperand::offset_minus_one)); + } __ addiu(a0, a0, loadstore_chunk); __ addiu(a1, a1, loadstore_chunk); __ bne(a0, a3, &ua_wordCopy_loop); @@ -722,8 +799,8 @@ void ElementsTransitionGenerator::GenerateSmiToDouble( __ LoadRoot(at, Heap::kTheHoleValueRootIndex); __ Assert(eq, kObjectFoundInSmiOnlyArray, at, Operand(t5)); } - __ sw(t0, MemOperand(t3)); // mantissa - __ sw(t1, MemOperand(t3, kIntSize)); // exponent + __ sw(t0, MemOperand(t3, Register::kMantissaOffset)); // mantissa + __ sw(t1, MemOperand(t3, Register::kExponentOffset)); // exponent __ Addu(t3, t3, kDoubleSize); __ bind(&entry); @@ -773,7 +850,9 @@ void ElementsTransitionGenerator::GenerateDoubleToObject( __ sw(t5, MemOperand(t2, HeapObject::kMapOffset)); // Prepare for conversion loop. - __ Addu(t0, t0, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4)); + __ Addu(t0, t0, Operand( + FixedDoubleArray::kHeaderSize - kHeapObjectTag + + Register::kExponentOffset)); __ Addu(a3, t2, Operand(FixedArray::kHeaderSize)); __ Addu(t2, t2, Operand(kHeapObjectTag)); __ sll(t1, t1, 1); @@ -782,7 +861,8 @@ void ElementsTransitionGenerator::GenerateDoubleToObject( __ LoadRoot(t5, Heap::kHeapNumberMapRootIndex); // Using offsetted addresses. // a3: begin of destination FixedArray element fields, not tagged - // t0: begin of source FixedDoubleArray element fields, not tagged, +4 + // t0: begin of source FixedDoubleArray element fields, not tagged, + // points to the exponent // t1: end of destination FixedArray, not tagged // t2: destination FixedArray // t3: the-hole pointer @@ -805,7 +885,9 @@ void ElementsTransitionGenerator::GenerateDoubleToObject( // Non-hole double, copy value into a heap number. __ AllocateHeapNumber(a2, a0, t6, t5, &gc_required); // a2: new heap number - __ lw(a0, MemOperand(t0, -12)); + // Load mantissa of current element, t0 point to exponent of next element. 
+ __ lw(a0, MemOperand(t0, (Register::kMantissaOffset + - Register::kExponentOffset - kDoubleSize))); __ sw(a0, FieldMemOperand(a2, HeapNumber::kMantissaOffset)); __ sw(a1, FieldMemOperand(a2, HeapNumber::kExponentOffset)); __ mov(a0, a3); @@ -1010,8 +1092,8 @@ void MathExpGenerator::EmitMathExp(MacroAssembler* masm, __ li(temp3, Operand(ExternalReference::math_exp_log_table())); __ sll(at, temp2, 3); __ Addu(temp3, temp3, Operand(at)); - __ lw(temp2, MemOperand(temp3, 0)); - __ lw(temp3, MemOperand(temp3, kPointerSize)); + __ lw(temp2, MemOperand(temp3, Register::kMantissaOffset)); + __ lw(temp3, MemOperand(temp3, Register::kExponentOffset)); // The first word is loaded is the lower number register. if (temp2.code() < temp3.code()) { __ sll(at, temp1, 20); @@ -1040,42 +1122,42 @@ void MathExpGenerator::EmitMathExp(MacroAssembler* masm, static const uint32_t kCodeAgePatchFirstInstruction = 0x00010180; #endif -static byte* GetNoCodeAgeSequence(uint32_t* length) { - // The sequence of instructions that is patched out for aging code is the - // following boilerplate stack-building prologue that is found in FUNCTIONS - static bool initialized = false; - static uint32_t sequence[kNoCodeAgeSequenceLength]; - byte* byte_sequence = reinterpret_cast<byte*>(sequence); - *length = kNoCodeAgeSequenceLength * Assembler::kInstrSize; - if (!initialized) { - // Since patcher is a large object, allocate it dynamically when needed, - // to avoid overloading the stack in stress conditions. - SmartPointer<CodePatcher> - patcher(new CodePatcher(byte_sequence, kNoCodeAgeSequenceLength)); - PredictableCodeSizeScope scope(patcher->masm(), *length); - patcher->masm()->Push(ra, fp, cp, a1); - patcher->masm()->nop(Assembler::CODE_AGE_SEQUENCE_NOP); - patcher->masm()->Addu( - fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); - initialized = true; - } - return byte_sequence; + +CodeAgingHelper::CodeAgingHelper() { + ASSERT(young_sequence_.length() == kNoCodeAgeSequenceLength); + // Since patcher is a large object, allocate it dynamically when needed, + // to avoid overloading the stack in stress conditions. + // DONT_FLUSH is used because the CodeAgingHelper is initialized early in + // the process, before MIPS simulator ICache is setup. 
+ SmartPointer<CodePatcher> patcher( + new CodePatcher(young_sequence_.start(), + young_sequence_.length() / Assembler::kInstrSize, + CodePatcher::DONT_FLUSH)); + PredictableCodeSizeScope scope(patcher->masm(), young_sequence_.length()); + patcher->masm()->Push(ra, fp, cp, a1); + patcher->masm()->nop(Assembler::CODE_AGE_SEQUENCE_NOP); + patcher->masm()->Addu( + fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); } -bool Code::IsYoungSequence(byte* sequence) { - uint32_t young_length; - byte* young_sequence = GetNoCodeAgeSequence(&young_length); - bool result = !memcmp(sequence, young_sequence, young_length); - ASSERT(result || - Memory::uint32_at(sequence) == kCodeAgePatchFirstInstruction); +#ifdef DEBUG +bool CodeAgingHelper::IsOld(byte* candidate) const { + return Memory::uint32_at(candidate) == kCodeAgePatchFirstInstruction; +} +#endif + + +bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) { + bool result = isolate->code_aging_helper()->IsYoung(sequence); + ASSERT(result || isolate->code_aging_helper()->IsOld(sequence)); return result; } -void Code::GetCodeAgeAndParity(byte* sequence, Age* age, +void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age, MarkingParity* parity) { - if (IsYoungSequence(sequence)) { + if (IsYoungSequence(isolate, sequence)) { *age = kNoAgeCodeAge; *parity = NO_MARKING_PARITY; } else { @@ -1091,10 +1173,9 @@ void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence, Code::Age age, MarkingParity parity) { - uint32_t young_length; - byte* young_sequence = GetNoCodeAgeSequence(&young_length); + uint32_t young_length = isolate->code_aging_helper()->young_sequence_length(); if (age == kNoAgeCodeAge) { - CopyBytes(sequence, young_sequence, young_length); + isolate->code_aging_helper()->CopyYoungSequenceTo(sequence); CPU::FlushICache(sequence, young_length); } else { Code* stub = GetCodeAgeStub(isolate, age, parity); diff --git a/deps/v8/src/mips/codegen-mips.h b/deps/v8/src/mips/codegen-mips.h index efbcb3ce3..23b8fe339 100644 --- a/deps/v8/src/mips/codegen-mips.h +++ b/deps/v8/src/mips/codegen-mips.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MIPS_CODEGEN_MIPS_H_ diff --git a/deps/v8/src/mips/constants-mips.cc b/deps/v8/src/mips/constants-mips.cc index 2dd7a31f3..db6e9c63b 100644 --- a/deps/v8/src/mips/constants-mips.cc +++ b/deps/v8/src/mips/constants-mips.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/mips/constants-mips.h b/deps/v8/src/mips/constants-mips.h index dcf8b82db..6aeb1195b 100644 --- a/deps/v8/src/mips/constants-mips.h +++ b/deps/v8/src/mips/constants-mips.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MIPS_CONSTANTS_H_ #define V8_MIPS_CONSTANTS_H_ @@ -55,6 +32,18 @@ enum ArchVariants { static const ArchVariants kArchVariant = kMips32r1; #endif +enum Endianness { + kLittle, + kBig +}; + +#if defined(V8_TARGET_LITTLE_ENDIAN) + static const Endianness kArchEndian = kLittle; +#elif defined(V8_TARGET_BIG_ENDIAN) + static const Endianness kArchEndian = kBig; +#else +#error Unknown endianness +#endif #if(defined(__mips_hard_float) && __mips_hard_float != 0) // Use floating-point coprocessor instructions. This flag is raised when @@ -69,6 +58,15 @@ const bool IsMipsSoftFloatABI = true; const bool IsMipsSoftFloatABI = true; #endif +#if defined(V8_TARGET_LITTLE_ENDIAN) +const uint32_t kHoleNanUpper32Offset = 4; +const uint32_t kHoleNanLower32Offset = 0; +#elif defined(V8_TARGET_BIG_ENDIAN) +const uint32_t kHoleNanUpper32Offset = 0; +const uint32_t kHoleNanLower32Offset = 4; +#else +#error Unknown endianness +#endif // Defines constants and accessor classes to assemble, disassemble and // simulate MIPS32 instructions. diff --git a/deps/v8/src/mips/cpu-mips.cc b/deps/v8/src/mips/cpu-mips.cc index 49d0b377e..71e3ddc79 100644 --- a/deps/v8/src/mips/cpu-mips.cc +++ b/deps/v8/src/mips/cpu-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // CPU specific code for arm independent of OS goes here. @@ -47,16 +24,6 @@ namespace v8 { namespace internal { -void CPU::SetUp() { - CpuFeatures::Probe(); -} - - -bool CPU::SupportsCrankshaft() { - return CpuFeatures::IsSupported(FPU); -} - - void CPU::FlushICache(void* start, size_t size) { // Nothing to do, flushing no instructions. if (size == 0) { diff --git a/deps/v8/src/mips/debug-mips.cc b/deps/v8/src/mips/debug-mips.cc index b9bf69db4..fcb5643d1 100644 --- a/deps/v8/src/mips/debug-mips.cc +++ b/deps/v8/src/mips/debug-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. @@ -37,8 +14,6 @@ namespace v8 { namespace internal { -#ifdef ENABLE_DEBUGGER_SUPPORT - bool BreakLocationIterator::IsDebugBreakAtReturn() { return Debug::IsDebugBreakAtReturn(rinfo()); } @@ -58,9 +33,8 @@ void BreakLocationIterator::SetDebugBreakAtReturn() { ASSERT(Assembler::kJSReturnSequenceInstructions == 7); CodePatcher patcher(rinfo()->pc(), Assembler::kJSReturnSequenceInstructions); // li and Call pseudo-instructions emit two instructions each. 
- patcher.masm()->li(v8::internal::t9, - Operand(reinterpret_cast<int32_t>( - debug_info_->GetIsolate()->debug()->debug_break_return()->entry()))); + patcher.masm()->li(v8::internal::t9, Operand(reinterpret_cast<int32_t>( + debug_info_->GetIsolate()->builtins()->Return_DebugBreak()->entry()))); patcher.masm()->Call(v8::internal::t9); patcher.masm()->nop(); patcher.masm()->nop(); @@ -105,7 +79,7 @@ void BreakLocationIterator::SetDebugBreakAtSlot() { // call t9 (jalr t9 / nop instruction pair) CodePatcher patcher(rinfo()->pc(), Assembler::kDebugBreakSlotInstructions); patcher.masm()->li(v8::internal::t9, Operand(reinterpret_cast<int32_t>( - debug_info_->GetIsolate()->debug()->debug_break_slot()->entry()))); + debug_info_->GetIsolate()->builtins()->Slot_DebugBreak()->entry()))); patcher.masm()->Call(v8::internal::t9); } @@ -156,7 +130,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, __ PrepareCEntryArgs(0); // No arguments. __ PrepareCEntryFunction(ExternalReference::debug_break(masm->isolate())); - CEntryStub ceb(1); + CEntryStub ceb(masm->isolate(), 1); __ CallStub(&ceb); // Restore the register values from the expression stack. @@ -188,6 +162,16 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, } +void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) { + // Register state for CallICStub + // ----------- S t a t e ------------- + // -- a1 : function + // -- a3 : slot in feedback array (smi) + // ----------------------------------- + Generate_DebugBreakCallHelper(masm, a1.bit() | a3.bit(), 0); +} + + void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) { // Calling convention for IC load (from ic-mips.cc). // ----------- S t a t e ------------- @@ -244,15 +228,6 @@ void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) { } -void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) { - // Calling convention for IC call (from ic-mips.cc). - // ----------- S t a t e ------------- - // -- a2: name - // ----------------------------------- - Generate_DebugBreakCallHelper(masm, a2.bit(), 0); -} - - void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) { // In places other than IC call sites it is expected that v0 is TOS which // is an object - this is not generally the case so this should be used with @@ -270,17 +245,6 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) { } -void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) { - // Register state for CallFunctionStub (from code-stubs-mips.cc). - // ----------- S t a t e ------------- - // -- a1 : function - // -- a2 : feedback array - // -- a3 : slot in feedback array - // ----------------------------------- - Generate_DebugBreakCallHelper(masm, a1.bit() | a2.bit() | a3.bit(), 0); -} - - void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) { // Calling convention for CallConstructStub (from code-stubs-mips.cc). // ----------- S t a t e ------------- @@ -339,9 +303,6 @@ const bool Debug::kFrameDropperSupported = false; #undef __ - -#endif // ENABLE_DEBUGGER_SUPPORT - } } // namespace v8::internal #endif // V8_TARGET_ARCH_MIPS diff --git a/deps/v8/src/mips/deoptimizer-mips.cc b/deps/v8/src/mips/deoptimizer-mips.cc index 0cd5e2ccd..4297ad127 100644 --- a/deps/v8/src/mips/deoptimizer-mips.cc +++ b/deps/v8/src/mips/deoptimizer-mips.cc @@ -1,30 +1,7 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -53,7 +30,7 @@ void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) { // Fail hard and early if we enter this code object again. byte* pointer = code->FindCodeAgeSequence(); if (pointer != NULL) { - pointer += kNoCodeAgeSequenceLength * Assembler::kInstrSize; + pointer += kNoCodeAgeSequenceLength; } else { pointer = code->instruction_start(); } diff --git a/deps/v8/src/mips/disasm-mips.cc b/deps/v8/src/mips/disasm-mips.cc index 1ae034035..52f33d25d 100644 --- a/deps/v8/src/mips/disasm-mips.cc +++ b/deps/v8/src/mips/disasm-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // A Disassembler object is used to disassemble a block of code instruction by // instruction. The default implementation of the NameConverter object can be diff --git a/deps/v8/src/mips/frames-mips.cc b/deps/v8/src/mips/frames-mips.cc index 20f071266..205e9bcf4 100644 --- a/deps/v8/src/mips/frames-mips.cc +++ b/deps/v8/src/mips/frames-mips.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/mips/frames-mips.h b/deps/v8/src/mips/frames-mips.h index 0ec2cbb86..c7b88aa5c 100644 --- a/deps/v8/src/mips/frames-mips.h +++ b/deps/v8/src/mips/frames-mips.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. diff --git a/deps/v8/src/mips/full-codegen-mips.cc b/deps/v8/src/mips/full-codegen-mips.cc index 87c0764b6..ff280ce76 100644 --- a/deps/v8/src/mips/full-codegen-mips.cc +++ b/deps/v8/src/mips/full-codegen-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
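
As an illustration of the endianness work in the hunks above (the lwl/lwr pairs that swap roles in the unaligned copy loops, and the new kArchEndian, kHoleNanUpper32Offset/kHoleNanLower32Offset and Register::kExponentOffset/kMantissaOffset style constants), here is a minimal standalone C++ sketch. It is host-only demonstration code, not V8 internals: it shows why the sign/exponent word of a 64-bit double sits at byte offset 4 on a little-endian target and at offset 0 on a big-endian one, which is exactly the distinction those constants encode.

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  const double value = 1.0;  // IEEE-754 bit pattern 0x3FF0000000000000
  const uint8_t* bytes = reinterpret_cast<const uint8_t*>(&value);

  // Detect host byte order at runtime; the patch selects this at compile
  // time via V8_TARGET_LITTLE_ENDIAN / V8_TARGET_BIG_ENDIAN.
  const uint32_t probe = 1;
  const bool little_endian = *reinterpret_cast<const uint8_t*>(&probe) == 1;

  // Byte offsets of the two 32-bit halves of a double, mirroring the
  // per-endianness constants added in constants-mips.h.
  const size_t kExponentOffset = little_endian ? 4 : 0;
  const size_t kMantissaOffset = little_endian ? 0 : 4;

  uint32_t exponent_word = 0;
  uint32_t mantissa_word = 0;
  std::memcpy(&exponent_word, bytes + kExponentOffset, sizeof(exponent_word));
  std::memcpy(&mantissa_word, bytes + kMantissaOffset, sizeof(mantissa_word));

  // For 1.0 the sign/exponent word is 0x3FF00000 and the mantissa word is 0,
  // regardless of which offset it lives at.
  std::printf("exponent word 0x%08X, mantissa word 0x%08X\n",
              static_cast<unsigned>(exponent_word),
              static_cast<unsigned>(mantissa_word));
  return 0;
}
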
#include "v8.h" @@ -127,10 +104,14 @@ static void EmitStackCheck(MacroAssembler* masm_, Isolate* isolate = masm_->isolate(); Label ok; ASSERT(scratch.is(sp) == (pointers == 0)); + Heap::RootListIndex index; if (pointers != 0) { __ Subu(scratch, sp, Operand(pointers * kPointerSize)); + index = Heap::kRealStackLimitRootIndex; + } else { + index = Heap::kStackLimitRootIndex; } - __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex); + __ LoadRoot(stack_limit_scratch, index); __ Branch(&ok, hs, scratch, Operand(stack_limit_scratch)); PredictableCodeSizeScope predictable(masm_, 4 * Assembler::kInstrSize); __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET); @@ -157,8 +138,6 @@ void FullCodeGenerator::Generate() { handler_table_ = isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); - InitializeFeedbackVector(); - profiling_counter_ = isolate()->factory()->NewCell( Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); SetFunctionPosition(function()); @@ -245,7 +224,7 @@ void FullCodeGenerator::Generate() { __ Push(info->scope()->GetScopeInfo()); __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2); } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); + FastNewContextStub stub(isolate(), heap_slots); __ CallStub(&stub); } else { __ push(a1); @@ -306,7 +285,7 @@ void FullCodeGenerator::Generate() { } else { type = ArgumentsAccessStub::NEW_SLOPPY_FAST; } - ArgumentsAccessStub stub(type); + ArgumentsAccessStub stub(isolate(), type); __ CallStub(&stub); SetVar(arguments, v0, a1, a2); @@ -1198,12 +1177,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { Label non_proxy; __ bind(&fixed_array); - Handle<Object> feedback = Handle<Object>( - Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), - isolate()); - StoreFeedbackVectorSlot(slot, feedback); __ li(a1, FeedbackVector()); - __ li(a2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker))); + __ li(a2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate()))); __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot))); __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check @@ -1364,7 +1339,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, !pretenure && scope()->is_function_scope() && info->num_literals() == 0) { - FastNewClosureStub stub(info->strict_mode(), info->is_generator()); + FastNewClosureStub stub(isolate(), + info->strict_mode(), + info->is_generator()); __ li(a2, Operand(info)); __ CallStub(&stub); } else { @@ -1683,13 +1660,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { : ObjectLiteral::kNoFlags; __ li(a0, Operand(Smi::FromInt(flags))); int properties_count = constant_properties->length() / 2; - if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() || - flags != ObjectLiteral::kFastElements || + if (expr->may_store_doubles() || expr->depth() > 1 || + Serializer::enabled(isolate()) || flags != ObjectLiteral::kFastElements || properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { __ Push(a3, a2, a1, a0); __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4); } else { - FastCloneShallowObjectStub stub(properties_count); + FastCloneShallowObjectStub stub(isolate(), properties_count); __ CallStub(&stub); } @@ -1831,13 +1808,14 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { if (has_fast_elements && constant_elements_values->map() == isolate()->heap()->fixed_cow_array_map()) { 
FastCloneShallowArrayStub stub( + isolate(), FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, allocation_site_mode, length); __ CallStub(&stub); __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1, a1, a2); - } else if (expr->depth() > 1 || Serializer::enabled() || + } else if (expr->depth() > 1 || Serializer::enabled(isolate()) || length > FastCloneShallowArrayStub::kMaximumClonedLength) { __ li(a0, Operand(Smi::FromInt(flags))); __ Push(a3, a2, a1, a0); @@ -1852,7 +1830,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; } - FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); + FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode, + length); __ CallStub(&stub); } @@ -1886,7 +1865,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { } else { __ li(a3, Operand(Smi::FromInt(i))); __ mov(a0, result_register()); - StoreArrayLiteralElementStub stub; + StoreArrayLiteralElementStub stub(isolate()); __ CallStub(&stub); } @@ -1902,7 +1881,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { void FullCodeGenerator::VisitAssignment(Assignment* expr) { - ASSERT(expr->target()->IsValidLeftHandSide()); + ASSERT(expr->target()->IsValidReferenceExpression()); Comment cmnt(masm_, "[ Assignment"); @@ -2134,7 +2113,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) { __ mov(a0, v0); __ mov(a1, a0); __ sw(a1, MemOperand(sp, 2 * kPointerSize)); - CallFunctionStub stub(1, CALL_AS_METHOD); + CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); __ CallStub(&stub); __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); @@ -2281,7 +2260,7 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) { Label gc_required; Label allocated; - Handle<Map> map(isolate()->native_context()->generator_result_map()); + Handle<Map> map(isolate()->native_context()->iterator_result_map()); __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT); __ jmp(&allocated); @@ -2355,8 +2334,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, patch_site.EmitJumpIfSmi(scratch1, &smi_case); __ bind(&stub_call); - BinaryOpICStub stub(op, mode); - CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); + BinaryOpICStub stub(isolate(), op, mode); + CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); patch_site.EmitPatchInfo(); __ jmp(&done); @@ -2433,16 +2412,16 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, OverwriteMode mode) { __ mov(a0, result_register()); __ pop(a1); - BinaryOpICStub stub(op, mode); + BinaryOpICStub stub(isolate(), op, mode); JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. - CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); + CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); patch_site.EmitPatchInfo(); context()->Plug(v0); } void FullCodeGenerator::EmitAssignment(Expression* expr) { - ASSERT(expr->IsValidLeftHandSide()); + ASSERT(expr->IsValidReferenceExpression()); // Left-hand side can only be a property, a global or a (parameter or local) // slot. @@ -2645,14 +2624,15 @@ void FullCodeGenerator::CallIC(Handle<Code> code, // Code common for calls using the IC. 
-void FullCodeGenerator::EmitCallWithIC(Call* expr) { +void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { Expression* callee = expr->expression(); - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); - CallFunctionFlags flags; + CallIC::CallType call_type = callee->IsVariableProxy() + ? CallIC::FUNCTION + : CallIC::METHOD; + // Get the target function. - if (callee->IsVariableProxy()) { + if (call_type == CallIC::FUNCTION) { { StackValueContext context(this); EmitVariableLoad(callee->AsVariableProxy()); PrepareForBailout(callee, NO_REGISTERS); @@ -2660,7 +2640,6 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) { // Push undefined as receiver. This is patched in the method prologue if it // is a sloppy mode method. __ Push(isolate()->factory()->undefined_value()); - flags = NO_CALL_FUNCTION_FLAGS; } else { // Load the function from the receiver. ASSERT(callee->IsProperty()); @@ -2671,39 +2650,19 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) { __ lw(at, MemOperand(sp, 0)); __ push(at); __ sw(v0, MemOperand(sp, kPointerSize)); - flags = CALL_AS_METHOD; - } - - // Load the arguments. - { PreservePositionScope scope(masm()->positions_recorder()); - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } } - // Record source position for debugger. - SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, flags); - __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); - RecordJSReturnSite(expr); - - // Restore context register. - __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, v0); + EmitCall(expr, call_type); } // Code common for calls using the IC. -void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, - Expression* key) { +void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, + Expression* key) { // Load the key. VisitForAccumulatorValue(key); Expression* callee = expr->expression(); - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); // Load the function from the receiver. ASSERT(callee->IsProperty()); @@ -2716,28 +2675,12 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, __ push(at); __ sw(v0, MemOperand(sp, kPointerSize)); - { PreservePositionScope scope(masm()->positions_recorder()); - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } - } - - // Record source position for debugger. - SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, CALL_AS_METHOD); - __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); - - RecordJSReturnSite(expr); - // Restore context register. - __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, v0); + EmitCall(expr, CallIC::METHOD); } -void FullCodeGenerator::EmitCallWithStub(Call* expr) { - // Code common for calls using the call stub. +void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) { + // Load the arguments. ZoneList<Expression*>* args = expr->arguments(); int arg_count = args->length(); { PreservePositionScope scope(masm()->positions_recorder()); @@ -2745,19 +2688,17 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { VisitForStackValue(args->at(i)); } } - // Record source position for debugger. 
- SetSourcePosition(expr->position()); - Handle<Object> uninitialized = - TypeFeedbackInfo::UninitializedSentinel(isolate()); - StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); - __ li(a2, FeedbackVector()); + // Record source position of the IC call. + SetSourcePosition(expr->position()); + Handle<Code> ic = CallIC::initialize_stub( + isolate(), arg_count, call_type); __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); - - // Record call targets in unoptimized code. - CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); + // Don't assign a type feedback id to the IC, since type feedback is provided + // by the vector above. + CallIC(ic); + RecordJSReturnSite(expr); // Restore context register. __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); @@ -2831,7 +2772,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { } // Record source position for debugger. SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); + CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); __ CallStub(&stub); RecordJSReturnSite(expr); @@ -2839,7 +2780,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); context()->DropAndPlug(1, v0); } else if (call_type == Call::GLOBAL_CALL) { - EmitCallWithIC(expr); + EmitCallWithLoadIC(expr); } else if (call_type == Call::LOOKUP_SLOT_CALL) { // Call to a lookup slot (dynamically introduced variable). VariableProxy* proxy = callee->AsVariableProxy(); @@ -2878,16 +2819,16 @@ void FullCodeGenerator::VisitCall(Call* expr) { // The receiver is either the global receiver or an object found // by LoadContextSlot. - EmitCallWithStub(expr); + EmitCall(expr); } else if (call_type == Call::PROPERTY_CALL) { Property* property = callee->AsProperty(); { PreservePositionScope scope(masm()->positions_recorder()); VisitForStackValue(property->obj()); } if (property->key()->IsPropertyName()) { - EmitCallWithIC(expr); + EmitCallWithLoadIC(expr); } else { - EmitKeyedCallWithIC(expr, property->key()); + EmitKeyedCallWithLoadIC(expr, property->key()); } } else { ASSERT(call_type == Call::OTHER_CALL); @@ -2898,7 +2839,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); __ push(a1); // Emit function call. - EmitCallWithStub(expr); + EmitCall(expr); } #ifdef DEBUG @@ -2935,12 +2876,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { __ lw(a1, MemOperand(sp, arg_count * kPointerSize)); // Record call targets in unoptimized code. 
- Handle<Object> uninitialized = - TypeFeedbackInfo::UninitializedSentinel(isolate()); - StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); if (FLAG_pretenuring_call_new) { - StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), - isolate()->factory()->NewAllocationSite()); + EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); ASSERT(expr->AllocationSiteFeedbackSlot() == expr->CallNewFeedbackSlot() + 1); } @@ -2948,8 +2885,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { __ li(a2, FeedbackVector()); __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); - CallConstructStub stub(RECORD_CALL_TARGET); - __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); + CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); + __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); PrepareForBailoutForId(expr->ReturnId(), TOS_REG); context()->Plug(v0); } @@ -3324,7 +3261,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) { VisitForAccumulatorValue(args->at(0)); __ mov(a1, v0); __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); - ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); + ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); __ CallStub(&stub); context()->Plug(v0); } @@ -3411,31 +3348,9 @@ void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { } -void FullCodeGenerator::EmitLog(CallRuntime* expr) { - // Conditionally generate a log call. - // Args: - // 0 (literal string): The type of logging (corresponds to the flags). - // This is used to determine whether or not to generate the log call. - // 1 (string): Format string. Access the string at argument index 2 - // with '%2s' (see Logger::LogRuntime for all the formats). - // 2 (array): Arguments to the format string. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT_EQ(args->length(), 3); - if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) { - VisitForStackValue(args->at(1)); - VisitForStackValue(args->at(2)); - __ CallRuntime(Runtime::kHiddenLog, 2); - } - - // Finally, we're expected to leave a value on the top of the stack. - __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); - context()->Plug(v0); -} - - void FullCodeGenerator::EmitSubString(CallRuntime* expr) { // Load the arguments on the stack and call the stub. - SubStringStub stub; + SubStringStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 3); VisitForStackValue(args->at(0)); @@ -3448,7 +3363,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) { void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { // Load the arguments on the stack and call the stub. 
- RegExpExecStub stub; + RegExpExecStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 4); VisitForStackValue(args->at(0)); @@ -3607,7 +3522,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { ASSERT(args->length() == 2); VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - MathPowStub stub(MathPowStub::ON_STACK); + MathPowStub stub(isolate(), MathPowStub::ON_STACK); __ CallStub(&stub); context()->Plug(v0); } @@ -3650,7 +3565,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { VisitForAccumulatorValue(args->at(0)); __ mov(a0, result_register()); - NumberToStringStub stub; + NumberToStringStub stub(isolate()); __ CallStub(&stub); context()->Plug(v0); } @@ -3779,7 +3694,7 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { __ pop(a1); __ mov(a0, result_register()); // StringAddStub requires args in a0, a1. - StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED); + StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); __ CallStub(&stub); context()->Plug(v0); } @@ -3792,32 +3707,12 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - StringCompareStub stub; + StringCompareStub stub(isolate()); __ CallStub(&stub); context()->Plug(v0); } -void FullCodeGenerator::EmitMathLog(CallRuntime* expr) { - // Load the argument on the stack and call the runtime function. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT(args->length() == 1); - VisitForStackValue(args->at(0)); - __ CallRuntime(Runtime::kMath_log, 1); - context()->Plug(v0); -} - - -void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) { - // Load the argument on the stack and call the runtime function. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT(args->length() == 1); - VisitForStackValue(args->at(0)); - __ CallRuntime(Runtime::kMath_sqrt, 1); - context()->Plug(v0); -} - - void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() >= 2); @@ -3851,7 +3746,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { - RegExpConstructResultStub stub; + RegExpConstructResultStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 3); VisitForStackValue(args->at(0)); @@ -4226,7 +4121,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { // Record source position of the IC call. SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); + CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); __ CallStub(&stub); @@ -4358,7 +4253,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { - ASSERT(expr->expression()->IsValidLeftHandSide()); + ASSERT(expr->expression()->IsValidReferenceExpression()); Comment cmnt(masm_, "[ CountOperation"); SetSourcePosition(expr->position()); @@ -4448,7 +4343,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { __ jmp(&stub_call); __ bind(&slow); } - ToNumberStub convert_stub; + ToNumberStub convert_stub(isolate()); __ CallStub(&convert_stub); // Save result for postfix expressions. 
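
A pattern recurs throughout this file's hunks: code stubs (CallFunctionStub, BinaryOpICStub, FastCloneShallowArrayStub, RegExpExecStub, and so on) now take the Isolate in their constructor, and GetCode() is then called without an isolate argument. The short sketch below uses hypothetical stand-in classes, not the real V8 stub hierarchy, purely to show the shape of that refactoring.

#include <cstdio>

// Stand-in for v8::internal::Isolate, for illustration only.
struct Isolate {
  const char* name;
};

// Hypothetical stub: the isolate is captured once at construction, so code
// generation no longer needs it passed in (compare the diff's change from
// stub.GetCode(isolate()) to stub.GetCode()).
class ExampleOpStub {
 public:
  ExampleOpStub(Isolate* isolate, int op) : isolate_(isolate), op_(op) {}

  void GetCode() const {  // no Isolate parameter any more
    std::printf("generating stub for op %d in isolate %s\n",
                op_, isolate_->name);
  }

 private:
  Isolate* isolate_;
  int op_;
};

int main() {
  Isolate isolate{"main"};
  ExampleOpStub stub(&isolate, /*op=*/1);  // mirrors e.g. BinaryOpICStub stub(isolate(), op, mode)
  stub.GetCode();                          // mirrors CallIC(stub.GetCode(), ...)
  return 0;
}
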
@@ -4478,8 +4373,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { // Record position before stub call. SetSourcePosition(expr->position()); - BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); - CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); + BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE); + CallIC(stub.GetCode(), expr->CountBinOpFeedbackId()); patch_site.EmitPatchInfo(); __ bind(&done); @@ -4591,12 +4486,13 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, } PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); - if (check->Equals(isolate()->heap()->number_string())) { + Factory* factory = isolate()->factory(); + if (String::Equals(check, factory->number_string())) { __ JumpIfSmi(v0, if_true); __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); Split(eq, v0, Operand(at), if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->string_string())) { + } else if (String::Equals(check, factory->string_string())) { __ JumpIfSmi(v0, if_false); // Check for undetectable objects => false. __ GetObjectType(v0, v0, a1); @@ -4605,20 +4501,20 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->symbol_string())) { + } else if (String::Equals(check, factory->symbol_string())) { __ JumpIfSmi(v0, if_false); __ GetObjectType(v0, v0, a1); Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->boolean_string())) { + } else if (String::Equals(check, factory->boolean_string())) { __ LoadRoot(at, Heap::kTrueValueRootIndex); __ Branch(if_true, eq, v0, Operand(at)); __ LoadRoot(at, Heap::kFalseValueRootIndex); Split(eq, v0, Operand(at), if_true, if_false, fall_through); } else if (FLAG_harmony_typeof && - check->Equals(isolate()->heap()->null_string())) { + String::Equals(check, factory->null_string())) { __ LoadRoot(at, Heap::kNullValueRootIndex); Split(eq, v0, Operand(at), if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->undefined_string())) { + } else if (String::Equals(check, factory->undefined_string())) { __ LoadRoot(at, Heap::kUndefinedValueRootIndex); __ Branch(if_true, eq, v0, Operand(at)); __ JumpIfSmi(v0, if_false); @@ -4627,14 +4523,14 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->function_string())) { + } else if (String::Equals(check, factory->function_string())) { __ JumpIfSmi(v0, if_false); STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); __ GetObjectType(v0, v0, a1); __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE)); Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE), if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->object_string())) { + } else if (String::Equals(check, factory->object_string())) { __ JumpIfSmi(v0, if_false); if (!FLAG_harmony_typeof) { __ LoadRoot(at, Heap::kNullValueRootIndex); @@ -4686,7 +4582,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { case Token::INSTANCEOF: { VisitForStackValue(expr->right()); - InstanceofStub stub(InstanceofStub::kNoFlags); + 
InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); __ CallStub(&stub); PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); // The stub returns 0 for true. diff --git a/deps/v8/src/mips/ic-mips.cc b/deps/v8/src/mips/ic-mips.cc index 09ffe95c0..d78fdc643 100644 --- a/deps/v8/src/mips/ic-mips.cc +++ b/deps/v8/src/mips/ic-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. @@ -918,7 +895,7 @@ static void KeyedStoreGenerateGenericHelper( // We have to see if the double version of the hole is present. If so // go to the runtime. __ Addu(address, elements, - Operand(FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32) + Operand(FixedDoubleArray::kHeaderSize + kHoleNanUpper32Offset - kHeapObjectTag)); __ sll(at, key, kPointerSizeLog2); __ addu(address, address, at); diff --git a/deps/v8/src/mips/lithium-codegen-mips.cc b/deps/v8/src/mips/lithium-codegen-mips.cc index 970a1bfc2..95ee3a6c1 100644 --- a/deps/v8/src/mips/lithium-codegen-mips.cc +++ b/deps/v8/src/mips/lithium-codegen-mips.cc @@ -86,13 +86,6 @@ void LCodeGen::FinishCode(Handle<Code> code) { code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); PopulateDeoptimizationData(code); - info()->CommitDependencies(code); -} - - -void LChunkBuilder::Abort(BailoutReason reason) { - info()->set_bailout_reason(reason); - status_ = ABORTED; } @@ -203,7 +196,7 @@ bool LCodeGen::GeneratePrologue() { Comment(";;; Allocate local context"); // Argument to NewContext is the function, which is in a1. 
if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); + FastNewContextStub stub(isolate(), heap_slots); __ CallStub(&stub); } else { __ push(a1); @@ -744,6 +737,7 @@ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, Safepoint::DeoptMode mode) { + environment->set_has_been_used(); if (!environment->HasBeenRegistered()) { // Physical stack frame layout: // -x ............. -4 0 ..................................... y @@ -860,7 +854,7 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { int length = deoptimizations_.length(); if (length == 0) return; Handle<DeoptimizationInputData> data = - factory()->NewDeoptimizationInputData(length, TENURED); + DeoptimizationInputData::New(isolate(), length, TENURED); Handle<ByteArray> translations = translations_.CreateByteArray(isolate()->factory()); @@ -1045,18 +1039,18 @@ void LCodeGen::DoCallStub(LCallStub* instr) { ASSERT(ToRegister(instr->result()).is(v0)); switch (instr->hydrogen()->major_key()) { case CodeStub::RegExpExec: { - RegExpExecStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + RegExpExecStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } case CodeStub::SubString: { - SubStringStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + SubStringStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } case CodeStub::StringCompare: { - StringCompareStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + StringCompareStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } default: @@ -1174,7 +1168,7 @@ void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { Register dividend = ToRegister(instr->dividend()); int32_t divisor = instr->divisor(); Register result = ToRegister(instr->result()); - ASSERT(divisor == kMinInt || (divisor != 0 && IsPowerOf2(Abs(divisor)))); + ASSERT(divisor == kMinInt || IsPowerOf2(Abs(divisor))); ASSERT(!result.is(dividend)); // Check for (0 / -x) that will produce negative zero. @@ -1242,26 +1236,27 @@ void LCodeGen::DoDivByConstI(LDivByConstI* instr) { } +// TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. void LCodeGen::DoDivI(LDivI* instr) { HBinaryOperation* hdiv = instr->hydrogen(); - const Register left = ToRegister(instr->left()); - const Register right = ToRegister(instr->right()); + Register dividend = ToRegister(instr->dividend()); + Register divisor = ToRegister(instr->divisor()); const Register result = ToRegister(instr->result()); // On MIPS div is asynchronous - it will run in the background while we // check for special cases. - __ div(left, right); + __ div(dividend, divisor); // Check for x / 0. if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { - DeoptimizeIf(eq, instr->environment(), right, Operand(zero_reg)); + DeoptimizeIf(eq, instr->environment(), divisor, Operand(zero_reg)); } // Check for (0 / -x) that will produce negative zero. 
if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { Label left_not_zero; - __ Branch(&left_not_zero, ne, left, Operand(zero_reg)); - DeoptimizeIf(lt, instr->environment(), right, Operand(zero_reg)); + __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); + DeoptimizeIf(lt, instr->environment(), divisor, Operand(zero_reg)); __ bind(&left_not_zero); } @@ -1269,23 +1264,12 @@ void LCodeGen::DoDivI(LDivI* instr) { if (hdiv->CheckFlag(HValue::kCanOverflow) && !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { Label left_not_min_int; - __ Branch(&left_not_min_int, ne, left, Operand(kMinInt)); - DeoptimizeIf(eq, instr->environment(), right, Operand(-1)); + __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); + DeoptimizeIf(eq, instr->environment(), divisor, Operand(-1)); __ bind(&left_not_min_int); } - if (hdiv->IsMathFloorOfDiv()) { - // We performed a truncating division. Correct the result if necessary. - Label done; - Register remainder = scratch0(); - __ mfhi(remainder); - __ mflo(result); - __ Branch(&done, eq, remainder, Operand(zero_reg), USE_DELAY_SLOT); - __ Xor(remainder, remainder, Operand(right)); - __ Branch(&done, ge, remainder, Operand(zero_reg)); - __ Subu(result, result, Operand(1)); - __ bind(&done); - } else if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { + if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { __ mfhi(result); DeoptimizeIf(ne, instr->environment(), result, Operand(zero_reg)); __ mflo(result); @@ -1331,25 +1315,27 @@ void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg)); } - if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { - // Note that we could emit branch-free code, but that would need one more - // register. - __ Xor(at, scratch, result); - if (divisor == -1) { - DeoptimizeIf(ge, instr->environment(), at, Operand(zero_reg)); - __ sra(result, dividend, shift); - } else { - Label no_overflow, done; - __ Branch(&no_overflow, lt, at, Operand(zero_reg)); - __ li(result, Operand(kMinInt / divisor)); - __ Branch(&done); - __ bind(&no_overflow); - __ sra(result, dividend, shift); - __ bind(&done); - } - } else { + + // If the negation could not overflow, simply shifting is OK. + if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { __ sra(result, dividend, shift); + return; + } + + // Dividing by -1 is basically negation, unless we overflow. + __ Xor(at, scratch, result); + if (divisor == -1) { + DeoptimizeIf(ge, instr->environment(), at, Operand(zero_reg)); + return; } + + Label no_overflow, done; + __ Branch(&no_overflow, lt, at, Operand(zero_reg)); + __ li(result, Operand(kMinInt / divisor)); + __ Branch(&done); + __ bind(&no_overflow); + __ sra(result, dividend, shift); + __ bind(&done); } @@ -1398,6 +1384,52 @@ void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { } +// TODO(svenpanne) Refactor this to avoid code duplication with DoDivI. +void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) { + HBinaryOperation* hdiv = instr->hydrogen(); + Register dividend = ToRegister(instr->dividend()); + Register divisor = ToRegister(instr->divisor()); + const Register result = ToRegister(instr->result()); + + // On MIPS div is asynchronous - it will run in the background while we + // check for special cases. + __ div(dividend, divisor); + + // Check for x / 0. 
+ if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { + DeoptimizeIf(eq, instr->environment(), divisor, Operand(zero_reg)); + } + + // Check for (0 / -x) that will produce negative zero. + if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { + Label left_not_zero; + __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); + DeoptimizeIf(lt, instr->environment(), divisor, Operand(zero_reg)); + __ bind(&left_not_zero); + } + + // Check for (kMinInt / -1). + if (hdiv->CheckFlag(HValue::kCanOverflow) && + !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { + Label left_not_min_int; + __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); + DeoptimizeIf(eq, instr->environment(), divisor, Operand(-1)); + __ bind(&left_not_min_int); + } + + // We performed a truncating division. Correct the result if necessary. + Label done; + Register remainder = scratch0(); + __ mfhi(remainder); + __ mflo(result); + __ Branch(&done, eq, remainder, Operand(zero_reg), USE_DELAY_SLOT); + __ Xor(remainder, remainder, Operand(divisor)); + __ Branch(&done, ge, remainder, Operand(zero_reg)); + __ Subu(result, result, Operand(1)); + __ bind(&done); +} + + void LCodeGen::DoMulI(LMulI* instr) { Register scratch = scratch0(); Register result = ToRegister(instr->result()); @@ -1696,9 +1728,16 @@ void LCodeGen::DoConstantE(LConstantE* instr) { void LCodeGen::DoConstantT(LConstantT* instr) { - Handle<Object> value = instr->value(isolate()); + Handle<Object> object = instr->value(isolate()); AllowDeferredHandleDereference smi_check; - __ li(ToRegister(instr->result()), value); + if (instr->hydrogen()->HasObjectMap()) { + Handle<Map> object_map = instr->hydrogen()->ObjectMap().handle(); + ASSERT(object->IsHeapObject()); + ASSERT(!object_map->is_stable() || + *object_map == Handle<HeapObject>::cast(object)->map()); + USE(object_map); + } + __ li(ToRegister(instr->result()), object); } @@ -1972,8 +2011,8 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) { ASSERT(ToRegister(instr->right()).is(a0)); ASSERT(ToRegister(instr->result()).is(v0)); - BinaryOpICStub stub(instr->op(), NO_OVERWRITE); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); // Other arch use a nop here, to signal that there is no inlined // patchable code. Mips does not need the nop, since our marker // instruction (andi zero_reg) will never be used in normal code. 
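The new LCodeGen::DoFlooringDivI above emits a truncating MIPS div, deoptimizes on division by zero, negative zero and kMinInt / -1, and then rounds the quotient toward negative infinity whenever the remainder is nonzero and its sign differs from the divisor's (the __ Xor / __ Branch(ge, ...) sequence). A plain C++ sketch of just that correction step, under the caveat that FlooringDiv is a hypothetical helper for illustration (not V8 code) and the cases the generated code handles by deopting are only asserted here:

#include <cassert>
#include <climits>

int FlooringDiv(int dividend, int divisor) {
  assert(divisor != 0);                             // generated code deopts instead
  assert(!(dividend == INT_MIN && divisor == -1));  // kMinInt / -1 also deopts
  int quotient = dividend / divisor;    // truncating, like MIPS `div` + mflo
  int remainder = dividend % divisor;   // what mfhi yields
  // Signs differ exactly when the XOR of the two values is negative.
  if (remainder != 0 && ((remainder ^ divisor) < 0)) {
    --quotient;                         // round toward negative infinity
  }
  return quotient;
}

// For example, FlooringDiv(-7, 2) == -4 while plain truncation gives -3.
// DoDivI, by contrast, keeps the truncating result and deopts when a nonzero
// remainder would be observable (uses are not truncating to int32).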
@@ -2614,8 +2653,8 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) { Register result = ToRegister(instr->result()); ASSERT(result.is(v0)); - InstanceofStub stub(InstanceofStub::kArgsInRegisters); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); __ Branch(&true_label, eq, result, Operand(zero_reg)); __ li(result, Operand(factory()->false_value())); @@ -2715,7 +2754,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, flags | InstanceofStub::kCallSiteInlineCheck); flags = static_cast<InstanceofStub::Flags>( flags | InstanceofStub::kReturnTrueFalseObject); - InstanceofStub stub(flags); + InstanceofStub stub(isolate(), flags); PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); LoadContextFromDeferred(instr->context()); @@ -2735,7 +2774,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, __ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE); __ StoreToSafepointRegisterSlot(temp, temp); } - CallCodeGeneric(stub.GetCode(isolate()), + CallCodeGeneric(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); @@ -3180,7 +3219,7 @@ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { __ ldc1(result, MemOperand(scratch)); if (instr->hydrogen()->RequiresHoleCheck()) { - __ lw(scratch, MemOperand(scratch, sizeof(kHoleNanLower32))); + __ lw(scratch, MemOperand(scratch, kHoleNanUpper32Offset)); DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32)); } } @@ -3259,7 +3298,8 @@ MemOperand LCodeGen::PrepareKeyedOperand(Register key, __ Addu(scratch0(), scratch0(), Operand(base_offset)); } else { ASSERT_EQ(-1, shift_size); - __ srl(scratch0(), key, 1); + // Key can be negative, so using sra here. 
+ __ sra(scratch0(), key, 1); __ Addu(scratch0(), scratch0(), Operand(base_offset)); } __ Addu(scratch0(), base, scratch0()); @@ -3808,7 +3848,7 @@ void LCodeGen::DoPower(LPower* instr) { ASSERT(ToDoubleRegister(instr->result()).is(f0)); if (exponent_type.IsSmi()) { - MathPowStub stub(MathPowStub::TAGGED); + MathPowStub stub(isolate(), MathPowStub::TAGGED); __ CallStub(&stub); } else if (exponent_type.IsTagged()) { Label no_deopt; @@ -3817,14 +3857,14 @@ void LCodeGen::DoPower(LPower* instr) { __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); DeoptimizeIf(ne, instr->environment(), t3, Operand(at)); __ bind(&no_deopt); - MathPowStub stub(MathPowStub::TAGGED); + MathPowStub stub(isolate(), MathPowStub::TAGGED); __ CallStub(&stub); } else if (exponent_type.IsInteger32()) { - MathPowStub stub(MathPowStub::INTEGER); + MathPowStub stub(isolate(), MathPowStub::INTEGER); __ CallStub(&stub); } else { ASSERT(exponent_type.IsDouble()); - MathPowStub stub(MathPowStub::DOUBLE); + MathPowStub stub(isolate(), MathPowStub::DOUBLE); __ CallStub(&stub); } } @@ -3928,8 +3968,8 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) { ASSERT(ToRegister(instr->result()).is(v0)); int arity = instr->arity(); - CallFunctionStub stub(arity, instr->hydrogen()->function_flags()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -3941,8 +3981,8 @@ void LCodeGen::DoCallNew(LCallNew* instr) { __ li(a0, Operand(instr->arity())); // No cell in a2 for construct type feedback in optimized code __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); - CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } @@ -3960,8 +4000,8 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { : DONT_OVERRIDE; if (instr->arity() == 0) { - ArrayNoArgumentConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } else if (instr->arity() == 1) { Label done; if (IsFastPackedElementsKind(kind)) { @@ -3972,18 +4012,20 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { __ Branch(&packed_case, eq, t1, Operand(zero_reg)); ElementsKind holey_kind = GetHoleyElementsKind(kind); - ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArraySingleArgumentConstructorStub stub(isolate(), + holey_kind, + override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); __ jmp(&done); __ bind(&packed_case); } - ArraySingleArgumentConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); __ bind(&done); } else { - ArrayNArgumentsConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } } @@ -4031,7 +4073,6 @@ void 
LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { return; } - Handle<Map> transition = instr->transition(); SmiCheck check_needed = instr->hydrogen()->value()->IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; @@ -4045,19 +4086,21 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { __ SmiTst(value, scratch); DeoptimizeIf(eq, instr->environment(), scratch, Operand(zero_reg)); - // We know that value is a smi now, so we can omit the check below. + // We know now that value is not a smi, so we can omit the check below. check_needed = OMIT_SMI_CHECK; } } else if (representation.IsDouble()) { - ASSERT(transition.is_null()); ASSERT(access.IsInobject()); + ASSERT(!instr->hydrogen()->has_transition()); ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); DoubleRegister value = ToDoubleRegister(instr->value()); __ sdc1(value, FieldMemOperand(object, offset)); return; } - if (!transition.is_null()) { + if (instr->hydrogen()->has_transition()) { + Handle<Map> transition = instr->hydrogen()->transition_map(); + AddDeprecationDependency(transition); __ li(scratch, Operand(transition)); __ sw(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); if (instr->hydrogen()->NeedsWriteBarrierForMap()) { @@ -4122,42 +4165,25 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { } -void LCodeGen::ApplyCheckIf(Condition condition, - LBoundsCheck* check, - Register src1, - const Operand& src2) { - if (FLAG_debug_code && check->hydrogen()->skip_check()) { +void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { + Condition cc = instr->hydrogen()->allow_equality() ? hi : hs; + Operand operand(0); + Register reg; + if (instr->index()->IsConstantOperand()) { + operand = ToOperand(instr->index()); + reg = ToRegister(instr->length()); + cc = ReverseCondition(cc); + } else { + reg = ToRegister(instr->index()); + operand = ToOperand(instr->length()); + } + if (FLAG_debug_code && instr->hydrogen()->skip_check()) { Label done; - __ Branch(&done, NegateCondition(condition), src1, src2); + __ Branch(&done, NegateCondition(cc), reg, operand); __ stop("eliminated bounds check failed"); __ bind(&done); } else { - DeoptimizeIf(condition, check->environment(), src1, src2); - } -} - - -void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { - if (instr->hydrogen()->skip_check()) return; - - Condition condition = instr->hydrogen()->allow_equality() ? 
hi : hs; - if (instr->index()->IsConstantOperand()) { - int constant_index = - ToInteger32(LConstantOperand::cast(instr->index())); - if (instr->hydrogen()->length()->representation().IsSmi()) { - __ li(at, Operand(Smi::FromInt(constant_index))); - } else { - __ li(at, Operand(constant_index)); - } - ApplyCheckIf(condition, - instr, - at, - Operand(ToRegister(instr->length()))); - } else { - ApplyCheckIf(condition, - instr, - ToRegister(instr->index()), - Operand(ToRegister(instr->length()))); + DeoptimizeIf(cc, instr->environment(), reg, operand); } } @@ -4401,16 +4427,16 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, scratch, GetRAState(), kDontSaveFPRegs); } else { + ASSERT(object_reg.is(a0)); ASSERT(ToRegister(instr->context()).is(cp)); PushSafepointRegistersScope scope( this, Safepoint::kWithRegistersAndDoubles); - __ mov(a0, object_reg); __ li(a1, Operand(to_map)); bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; - TransitionElementsKindStub stub(from_kind, to_kind, is_js_array); + TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); __ CallStub(&stub); RecordSafepointWithRegistersAndDoubles( - instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); + instr->pointer_map(), 0, Safepoint::kLazyDeopt); } __ bind(&not_applicable); } @@ -4431,9 +4457,10 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) { ASSERT(ToRegister(instr->context()).is(cp)); ASSERT(ToRegister(instr->left()).is(a1)); ASSERT(ToRegister(instr->right()).is(a0)); - StringAddStub stub(instr->hydrogen()->flags(), + StringAddStub stub(isolate(), + instr->hydrogen()->flags(), instr->hydrogen()->pretenure_flag()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -5144,7 +5171,14 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) { Register object_; }; - if (instr->hydrogen()->CanOmitMapChecks()) return; + if (instr->hydrogen()->IsStabilityCheck()) { + const UniqueSet<Map>* maps = instr->hydrogen()->maps(); + for (int i = 0; i < maps->size(); ++i) { + AddStabilityDependency(maps->at(i).handle()); + } + return; + } + Register map_reg = scratch0(); LOperand* input = instr->value(); ASSERT(input->IsRegister()); @@ -5152,20 +5186,20 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) { __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); DeferredCheckMaps* deferred = NULL; - if (instr->hydrogen()->has_migration_target()) { + if (instr->hydrogen()->HasMigrationTarget()) { deferred = new(zone()) DeferredCheckMaps(this, instr, reg); __ bind(deferred->check_maps()); } - UniqueSet<Map> map_set = instr->hydrogen()->map_set(); + const UniqueSet<Map>* maps = instr->hydrogen()->maps(); Label success; - for (int i = 0; i < map_set.size() - 1; i++) { - Handle<Map> map = map_set.at(i).handle(); + for (int i = 0; i < maps->size() - 1; i++) { + Handle<Map> map = maps->at(i).handle(); __ CompareMapAndBranch(map_reg, map, &success, eq, &success); } - Handle<Map> map = map_set.at(map_set.size() - 1).handle(); + Handle<Map> map = maps->at(maps->size() - 1).handle(); // Do the CompareMap() directly within the Branch() and DeoptimizeIf().
- if (instr->hydrogen()->has_migration_target()) { + if (instr->hydrogen()->HasMigrationTarget()) { __ Branch(deferred->entry(), ne, map_reg, Operand(map)); } else { DeoptimizeIf(ne, instr->environment(), map_reg, Operand(map)); @@ -5333,7 +5367,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) { __ push(size); } else { int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); - __ Push(Smi::FromInt(size)); + if (size >= 0 && size <= Smi::kMaxValue) { + __ Push(Smi::FromInt(size)); + } else { + // We should never get here at runtime => abort + __ stop("invalid allocation size"); + return; + } } int flags = AllocateDoubleAlignFlag::encode( @@ -5423,10 +5463,11 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { // space for nested functions that don't need literals cloning. bool pretenure = instr->hydrogen()->pretenure(); if (!pretenure && instr->hydrogen()->has_no_literals()) { - FastNewClosureStub stub(instr->hydrogen()->strict_mode(), + FastNewClosureStub stub(isolate(), + instr->hydrogen()->strict_mode(), instr->hydrogen()->is_generator()); __ li(a2, Operand(instr->hydrogen()->shared_info())); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } else { __ li(a2, Operand(instr->hydrogen()->shared_info())); __ li(a1, Operand(pretenure ? factory()->true_value() @@ -5478,7 +5519,8 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, // register. Condition final_branch_condition = kNoCondition; Register scratch = scratch0(); - if (type_name->Equals(heap()->number_string())) { + Factory* factory = isolate()->factory(); + if (String::Equals(type_name, factory->number_string())) { __ JumpIfSmi(input, true_label); __ lw(input, FieldMemOperand(input, HeapObject::kMapOffset)); __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); @@ -5486,7 +5528,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, cmp2 = Operand(at); final_branch_condition = eq; - } else if (type_name->Equals(heap()->string_string())) { + } else if (String::Equals(type_name, factory->string_string())) { __ JumpIfSmi(input, false_label); __ GetObjectType(input, input, scratch); __ Branch(USE_DELAY_SLOT, false_label, @@ -5499,14 +5541,14 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, cmp2 = Operand(zero_reg); final_branch_condition = eq; - } else if (type_name->Equals(heap()->symbol_string())) { + } else if (String::Equals(type_name, factory->symbol_string())) { __ JumpIfSmi(input, false_label); __ GetObjectType(input, input, scratch); cmp1 = scratch; cmp2 = Operand(SYMBOL_TYPE); final_branch_condition = eq; - } else if (type_name->Equals(heap()->boolean_string())) { + } else if (String::Equals(type_name, factory->boolean_string())) { __ LoadRoot(at, Heap::kTrueValueRootIndex); __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input)); __ LoadRoot(at, Heap::kFalseValueRootIndex); @@ -5514,13 +5556,14 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, cmp2 = Operand(input); final_branch_condition = eq; - } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) { + } else if (FLAG_harmony_typeof && + String::Equals(type_name, factory->null_string())) { __ LoadRoot(at, Heap::kNullValueRootIndex); cmp1 = at; cmp2 = Operand(input); final_branch_condition = eq; - } else if (type_name->Equals(heap()->undefined_string())) { + } else if (String::Equals(type_name, factory->undefined_string())) { __ LoadRoot(at, Heap::kUndefinedValueRootIndex); __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input)); 
// The first instruction of JumpIfSmi is an And - it is safe in the delay @@ -5534,7 +5577,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, cmp2 = Operand(zero_reg); final_branch_condition = ne; - } else if (type_name->Equals(heap()->function_string())) { + } else if (String::Equals(type_name, factory->function_string())) { STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); __ JumpIfSmi(input, false_label); __ GetObjectType(input, scratch, input); @@ -5543,7 +5586,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, cmp2 = Operand(JS_FUNCTION_PROXY_TYPE); final_branch_condition = eq; - } else if (type_name->Equals(heap()->object_string())) { + } else if (String::Equals(type_name, factory->object_string())) { __ JumpIfSmi(input, false_label); if (!FLAG_harmony_typeof) { __ LoadRoot(at, Heap::kNullValueRootIndex); @@ -5792,13 +5835,60 @@ void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { } +void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, + Register result, + Register object, + Register index) { + PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); + __ Push(object, index); + __ mov(cp, zero_reg); + __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); + RecordSafepointWithRegisters( + instr->pointer_map(), 2, Safepoint::kNoLazyDeopt); + __ StoreToSafepointRegisterSlot(v0, result); +} + + void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { + class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode { + public: + DeferredLoadMutableDouble(LCodeGen* codegen, + LLoadFieldByIndex* instr, + Register result, + Register object, + Register index) + : LDeferredCode(codegen), + instr_(instr), + result_(result), + object_(object), + index_(index) { + } + virtual void Generate() V8_OVERRIDE { + codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_); + } + virtual LInstruction* instr() V8_OVERRIDE { return instr_; } + private: + LLoadFieldByIndex* instr_; + Register result_; + Register object_; + Register index_; + }; + Register object = ToRegister(instr->object()); Register index = ToRegister(instr->index()); Register result = ToRegister(instr->result()); Register scratch = scratch0(); + DeferredLoadMutableDouble* deferred; + deferred = new(zone()) DeferredLoadMutableDouble( + this, instr, result, object, index); + Label out_of_object, done; + + __ And(scratch, index, Operand(Smi::FromInt(1))); + __ Branch(deferred->entry(), ne, scratch, Operand(zero_reg)); + __ sra(index, index, 1); + __ Branch(USE_DELAY_SLOT, &out_of_object, lt, index, Operand(zero_reg)); __ sll(scratch, index, kPointerSizeLog2 - kSmiTagSize); // In delay slot. @@ -5814,6 +5904,7 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { __ Subu(scratch, result, scratch); __ lw(result, FieldMemOperand(scratch, FixedArray::kHeaderSize - kPointerSize)); + __ bind(deferred->exit()); __ bind(&done); } diff --git a/deps/v8/src/mips/lithium-codegen-mips.h b/deps/v8/src/mips/lithium-codegen-mips.h index 63f0661ae..7c52d8182 100644 --- a/deps/v8/src/mips/lithium-codegen-mips.h +++ b/deps/v8/src/mips/lithium-codegen-mips.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_ #define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_ @@ -34,7 +11,7 @@ #include "lithium-codegen.h" #include "safepoint-table.h" #include "scopes.h" -#include "v8utils.h" +#include "utils.h" namespace v8 { namespace internal { @@ -140,6 +117,10 @@ class LCodeGen: public LCodeGenBase { Label* map_check); void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); + void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, + Register result, + Register object, + Register index); // Parallel move support. 
void DoParallelMove(LParallelMove* move); @@ -182,8 +163,6 @@ class LCodeGen: public LCodeGenBase { int GetStackSlotCount() const { return chunk()->spill_slot_count(); } - void Abort(BailoutReason reason); - void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } void SaveCallerDoubles(); @@ -259,10 +238,6 @@ class LCodeGen: public LCodeGenBase { LEnvironment* environment, Register src1 = zero_reg, const Operand& src2 = Operand(zero_reg)); - void ApplyCheckIf(Condition condition, - LBoundsCheck* check, - Register src1 = zero_reg, - const Operand& src2 = Operand(zero_reg)); void AddToTranslation(LEnvironment* environment, Translation* translation, @@ -423,13 +398,15 @@ class LCodeGen: public LCodeGenBase { switch (codegen_->expected_safepoint_kind_) { case Safepoint::kWithRegisters: { - StoreRegistersStateStub stub1(kDontSaveFPRegs); + StoreRegistersStateStub stub1(codegen_->masm_->isolate(), + kDontSaveFPRegs); codegen_->masm_->push(ra); codegen_->masm_->CallStub(&stub1); break; } case Safepoint::kWithRegistersAndDoubles: { - StoreRegistersStateStub stub2(kSaveFPRegs); + StoreRegistersStateStub stub2(codegen_->masm_->isolate(), + kSaveFPRegs); codegen_->masm_->push(ra); codegen_->masm_->CallStub(&stub2); break; @@ -444,13 +421,15 @@ class LCodeGen: public LCodeGenBase { ASSERT((kind & Safepoint::kWithRegisters) != 0); switch (kind) { case Safepoint::kWithRegisters: { - RestoreRegistersStateStub stub1(kDontSaveFPRegs); + RestoreRegistersStateStub stub1(codegen_->masm_->isolate(), + kDontSaveFPRegs); codegen_->masm_->push(ra); codegen_->masm_->CallStub(&stub1); break; } case Safepoint::kWithRegistersAndDoubles: { - RestoreRegistersStateStub stub2(kSaveFPRegs); + RestoreRegistersStateStub stub2(codegen_->masm_->isolate(), + kSaveFPRegs); codegen_->masm_->push(ra); codegen_->masm_->CallStub(&stub2); break; diff --git a/deps/v8/src/mips/lithium-gap-resolver-mips.cc b/deps/v8/src/mips/lithium-gap-resolver-mips.cc index 3ee74866c..69af8b7ee 100644 --- a/deps/v8/src/mips/lithium-gap-resolver-mips.cc +++ b/deps/v8/src/mips/lithium-gap-resolver-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/mips/lithium-gap-resolver-mips.h b/deps/v8/src/mips/lithium-gap-resolver-mips.h index ea1ea3cbb..f3f6b7d61 100644 --- a/deps/v8/src/mips/lithium-gap-resolver-mips.h +++ b/deps/v8/src/mips/lithium-gap-resolver-mips.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MIPS_LITHIUM_GAP_RESOLVER_MIPS_H_ #define V8_MIPS_LITHIUM_GAP_RESOLVER_MIPS_H_ diff --git a/deps/v8/src/mips/lithium-mips.cc b/deps/v8/src/mips/lithium-mips.cc index 752f67673..eb960a4bb 100644 --- a/deps/v8/src/mips/lithium-mips.cc +++ b/deps/v8/src/mips/lithium-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -451,7 +428,7 @@ LPlatformChunk* LChunkBuilder::Build() { } -void LCodeGen::Abort(BailoutReason reason) { +void LChunkBuilder::Abort(BailoutReason reason) { info()->set_bailout_reason(reason); status_ = ABORTED; } @@ -628,6 +605,8 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, !hinstr->HasObservableSideEffects(); if (needs_environment && !instr->HasEnvironment()) { instr = AssignEnvironment(instr); + // We can't really figure out if the environment is needed or not. + instr->environment()->set_has_been_used(); } return instr; @@ -879,7 +858,8 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) { // the it was just a plain use), so it is free to move the split child into // the same register that is used for the use-at-start. // See https://code.google.com/p/chromium/issues/detail?id=201590 - if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) { + if (!(instr->ClobbersRegisters() && + instr->ClobbersDoubleRegisters(isolate()))) { int fixed = 0; int used_at_start = 0; for (UseIterator it(instr); !it.Done(); it.Advance()) { @@ -939,18 +919,20 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) { if (goto_instr != NULL) return goto_instr; HValue* value = instr->value(); - LBranch* result = new(zone()) LBranch(UseRegister(value)); - // Tagged values that are not known smis or booleans require a - // deoptimization environment. If the instruction is generic no - // environment is needed since all cases are handled. - Representation rep = value->representation(); + Representation r = value->representation(); HType type = value->type(); ToBooleanStub::Types expected = instr->expected_input_types(); - if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean() && - !expected.IsGeneric()) { - return AssignEnvironment(result); + if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); + + bool easy_case = !r.IsTagged() || type.IsBoolean() || type.IsSmi() || + type.IsJSArray() || type.IsHeapNumber() || type.IsString(); + LInstruction* branch = new(zone()) LBranch(UseRegister(value)); + if (!easy_case && + ((!expected.Contains(ToBooleanStub::SMI) && expected.NeedsMap()) || + !expected.IsGeneric())) { + branch = AssignEnvironment(branch); } - return result; + return branch; } @@ -1162,8 +1144,11 @@ LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) { ? 
NULL : UseFixed(instr->context(), cp); LOperand* input = UseRegister(instr->value()); - LMathAbs* result = new(zone()) LMathAbs(context, input); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + LInstruction* result = + DefineAsRegister(new(zone()) LMathAbs(context, input)); + if (!r.IsDouble() && !r.IsSmiOrInteger32()) result = AssignPointerMap(result); + if (!r.IsDouble()) result = AssignEnvironment(result); + return result; } @@ -1290,14 +1275,23 @@ LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) { } -LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) { +LInstruction* LChunkBuilder::DoDivI(HDiv* instr) { ASSERT(instr->representation().IsSmiOrInteger32()); ASSERT(instr->left()->representation().Equals(instr->representation())); ASSERT(instr->right()->representation().Equals(instr->representation())); LOperand* dividend = UseRegister(instr->left()); LOperand* divisor = UseRegister(instr->right()); - LDivI* div = new(zone()) LDivI(dividend, divisor); - return AssignEnvironment(DefineAsRegister(div)); + LInstruction* result = + DefineAsRegister(new(zone()) LDivI(dividend, divisor)); + if (instr->CheckFlag(HValue::kCanBeDivByZero) || + instr->CheckFlag(HValue::kBailoutOnMinusZero) || + (instr->CheckFlag(HValue::kCanOverflow) && + !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32)) || + (!instr->IsMathFloorOfDiv() && + !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) { + result = AssignEnvironment(result); + } + return result; } @@ -1351,13 +1345,24 @@ LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) { } +LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) { + ASSERT(instr->representation().IsSmiOrInteger32()); + ASSERT(instr->left()->representation().Equals(instr->representation())); + ASSERT(instr->right()->representation().Equals(instr->representation())); + LOperand* dividend = UseRegister(instr->left()); + LOperand* divisor = UseRegister(instr->right()); + LFlooringDivI* div = new(zone()) LFlooringDivI(dividend, divisor); + return AssignEnvironment(DefineAsRegister(div)); +} + + LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) { if (instr->RightIsPowerOf2()) { return DoFlooringDivByPowerOf2I(instr); } else if (instr->right()->IsConstant()) { return DoFlooringDivByConstI(instr); } else { - return DoDivI(instr); + return DoFlooringDivI(instr); } } @@ -1599,6 +1604,8 @@ LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) { LInstruction* LChunkBuilder::DoCompareNumericAndBranch( HCompareNumericAndBranch* instr) { + LInstruction* goto_instr = CheckElideControlInstruction(instr); + if (goto_instr != NULL) return goto_instr; Representation r = instr->representation(); if (r.IsSmiOrInteger32()) { ASSERT(instr->left()->representation().Equals(r)); @@ -1753,9 +1760,16 @@ LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) { LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) { - LOperand* value = UseRegisterOrConstantAtStart(instr->index()); - LOperand* length = UseRegister(instr->length()); - return AssignEnvironment(new(zone()) LBoundsCheck(value, length)); + if (!FLAG_debug_code && instr->skip_check()) return NULL; + LOperand* index = UseRegisterOrConstantAtStart(instr->index()); + LOperand* length = !index->IsConstantOperand() + ? 
UseRegisterOrConstantAtStart(instr->length()) + : UseRegisterAtStart(instr->length()); + LInstruction* result = new(zone()) LBoundsCheck(index, length); + if (!FLAG_debug_code || !instr->skip_check()) { + result = AssignEnvironment(result); + } + return result; } @@ -1789,20 +1803,21 @@ LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) { LInstruction* LChunkBuilder::DoChange(HChange* instr) { Representation from = instr->from(); Representation to = instr->to(); + HValue* val = instr->value(); if (from.IsSmi()) { if (to.IsTagged()) { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); return DefineSameAsFirst(new(zone()) LDummyUse(value)); } from = Representation::Tagged(); } if (from.IsTagged()) { if (to.IsDouble()) { - LOperand* value = UseRegister(instr->value()); - LNumberUntagD* res = new(zone()) LNumberUntagD(value); - return AssignEnvironment(DefineAsRegister(res)); + LOperand* value = UseRegister(val); + LInstruction* result = DefineAsRegister(new(zone()) LNumberUntagD(value)); + if (!val->representation().IsSmi()) result = AssignEnvironment(result); + return result; } else if (to.IsSmi()) { - HValue* val = instr->value(); LOperand* value = UseRegister(val); if (val->type().IsSmi()) { return DefineSameAsFirst(new(zone()) LDummyUse(value)); @@ -1810,66 +1825,59 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value))); } else { ASSERT(to.IsInteger32()); - LOperand* value = NULL; - LInstruction* res = NULL; - HValue* val = instr->value(); if (val->type().IsSmi() || val->representation().IsSmi()) { - value = UseRegisterAtStart(val); - res = DefineAsRegister(new(zone()) LSmiUntag(value, false)); + LOperand* value = UseRegisterAtStart(val); + return DefineAsRegister(new(zone()) LSmiUntag(value, false)); } else { - value = UseRegister(val); + LOperand* value = UseRegister(val); LOperand* temp1 = TempRegister(); LOperand* temp2 = FixedTemp(f22); - res = DefineSameAsFirst(new(zone()) LTaggedToI(value, - temp1, - temp2)); - res = AssignEnvironment(res); + LInstruction* result = + DefineSameAsFirst(new(zone()) LTaggedToI(value, temp1, temp2)); + if (!val->representation().IsSmi()) result = AssignEnvironment(result); + return result; } - return res; } } else if (from.IsDouble()) { if (to.IsTagged()) { info()->MarkAsDeferredCalling(); - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); - - // Make sure that the temp and result_temp registers are - // different. 
LUnallocated* result_temp = TempRegister(); LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2); - Define(result, result_temp); - return AssignPointerMap(result); + return AssignPointerMap(Define(result, result_temp)); } else if (to.IsSmi()) { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); return AssignEnvironment( DefineAsRegister(new(zone()) LDoubleToSmi(value))); } else { ASSERT(to.IsInteger32()); - LOperand* value = UseRegister(instr->value()); - LDoubleToI* res = new(zone()) LDoubleToI(value); - return AssignEnvironment(DefineAsRegister(res)); + LOperand* value = UseRegister(val); + LInstruction* result = DefineAsRegister(new(zone()) LDoubleToI(value)); + if (!instr->CanTruncateToInt32()) result = AssignEnvironment(result); + return result; } } else if (from.IsInteger32()) { info()->MarkAsDeferredCalling(); if (to.IsTagged()) { - HValue* val = instr->value(); - LOperand* value = UseRegisterAtStart(val); if (!instr->CheckFlag(HValue::kCanOverflow)) { + LOperand* value = UseRegisterAtStart(val); return DefineAsRegister(new(zone()) LSmiTag(value)); } else if (val->CheckFlag(HInstruction::kUint32)) { + LOperand* value = UseRegisterAtStart(val); LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + return AssignPointerMap(DefineAsRegister(result)); } else { + LOperand* value = UseRegisterAtStart(val); LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); LNumberTagI* result = new(zone()) LNumberTagI(value, temp1, temp2); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + return AssignPointerMap(DefineAsRegister(result)); } } else if (to.IsSmi()) { - HValue* val = instr->value(); LOperand* value = UseRegister(val); LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value)); if (instr->CheckFlag(HValue::kCanOverflow)) { @@ -1878,12 +1886,10 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { return result; } else { ASSERT(to.IsDouble()); - if (instr->value()->CheckFlag(HInstruction::kUint32)) { - return DefineAsRegister( - new(zone()) LUint32ToDouble(UseRegister(instr->value()))); + if (val->CheckFlag(HInstruction::kUint32)) { + return DefineAsRegister(new(zone()) LUint32ToDouble(UseRegister(val))); } else { - return DefineAsRegister( - new(zone()) LInteger32ToDouble(Use(instr->value()))); + return DefineAsRegister(new(zone()) LInteger32ToDouble(Use(val))); } } } @@ -1894,7 +1900,9 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) { LOperand* value = UseRegisterAtStart(instr->value()); - return AssignEnvironment(new(zone()) LCheckNonSmi(value)); + LInstruction* result = new(zone()) LCheckNonSmi(value); + if (!instr->value()->IsHeapObject()) result = AssignEnvironment(result); + return result; } @@ -1918,15 +1926,12 @@ LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) { LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) { - LOperand* value = NULL; - if (!instr->CanOmitMapChecks()) { - value = UseRegisterAtStart(instr->value()); - if (instr->has_migration_target()) info()->MarkAsDeferredCalling(); - } - LCheckMaps* result = new(zone()) LCheckMaps(value); - if (!instr->CanOmitMapChecks()) { - AssignEnvironment(result); - if (instr->has_migration_target()) return AssignPointerMap(result); + if (instr->IsStabilityCheck()) return 
new(zone()) LCheckMaps; + LOperand* value = UseRegisterAtStart(instr->value()); + LInstruction* result = AssignEnvironment(new(zone()) LCheckMaps(value)); + if (instr->HasMigrationTarget()) { + info()->MarkAsDeferredCalling(); + result = AssignPointerMap(result); } return result; } @@ -2025,7 +2030,10 @@ LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) { LOperand* context = UseRegisterAtStart(instr->value()); LInstruction* result = DefineAsRegister(new(zone()) LLoadContextSlot(context)); - return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result; + if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) { + result = AssignEnvironment(result); + } + return result; } @@ -2040,7 +2048,10 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) { value = UseRegister(instr->value()); } LInstruction* result = new(zone()) LStoreContextSlot(context, value); - return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result; + if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) { + result = AssignEnvironment(result); + } + return result; } @@ -2075,7 +2086,7 @@ LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) { ASSERT(instr->key()->representation().IsSmiOrInteger32()); ElementsKind elements_kind = instr->elements_kind(); LOperand* key = UseRegisterOrConstantAtStart(instr->key()); - LLoadKeyed* result = NULL; + LInstruction* result = NULL; if (!instr->is_typed_elements()) { LOperand* obj = NULL; @@ -2085,24 +2096,28 @@ LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) { ASSERT(instr->representation().IsSmiOrTagged()); obj = UseRegisterAtStart(instr->elements()); } - result = new(zone()) LLoadKeyed(obj, key); + result = DefineAsRegister(new(zone()) LLoadKeyed(obj, key)); } else { ASSERT( (instr->representation().IsInteger32() && - !IsDoubleOrFloatElementsKind(instr->elements_kind())) || + !IsDoubleOrFloatElementsKind(elements_kind)) || (instr->representation().IsDouble() && - IsDoubleOrFloatElementsKind(instr->elements_kind()))); + IsDoubleOrFloatElementsKind(elements_kind))); LOperand* backing_store = UseRegister(instr->elements()); - result = new(zone()) LLoadKeyed(backing_store, key); + result = DefineAsRegister(new(zone()) LLoadKeyed(backing_store, key)); } - DefineAsRegister(result); - // An unsigned int array load might overflow and cause a deopt, make sure it - // has an environment. - bool can_deoptimize = instr->RequiresHoleCheck() || - elements_kind == EXTERNAL_UINT32_ELEMENTS || - elements_kind == UINT32_ELEMENTS; - return can_deoptimize ? AssignEnvironment(result) : result; + if ((instr->is_external() || instr->is_fixed_typed_array()) ? 
+ // see LCodeGen::DoLoadKeyedExternalArray + ((elements_kind == EXTERNAL_UINT32_ELEMENTS || + elements_kind == UINT32_ELEMENTS) && + !instr->CheckFlag(HInstruction::kUint32)) : + // see LCodeGen::DoLoadKeyedFixedDoubleArray and + // LCodeGen::DoLoadKeyedFixedArray + instr->RequiresHoleCheck()) { + result = AssignEnvironment(result); + } + return result; } @@ -2178,17 +2193,18 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) { LInstruction* LChunkBuilder::DoTransitionElementsKind( HTransitionElementsKind* instr) { - LOperand* object = UseRegister(instr->object()); if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) { + LOperand* object = UseRegister(instr->object()); LOperand* new_map_reg = TempRegister(); LTransitionElementsKind* result = new(zone()) LTransitionElementsKind(object, NULL, new_map_reg); return result; } else { + LOperand* object = UseFixed(instr->object(), a0); LOperand* context = UseFixed(instr->context(), cp); LTransitionElementsKind* result = new(zone()) LTransitionElementsKind(object, context, NULL); - return AssignPointerMap(result); + return MarkAsCall(result, instr); } } @@ -2232,11 +2248,11 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) { // We need a temporary register for write barrier of the map field. LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL; - LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp); - if (instr->field_representation().IsHeapObject()) { - if (!instr->value()->type().IsHeapObject()) { - return AssignEnvironment(result); - } + LInstruction* result = new(zone()) LStoreNamedField(obj, val, temp); + if (!instr->access().IsExternalMemory() && + instr->field_representation().IsHeapObject() && + !instr->value()->type().IsHeapObject()) { + result = AssignEnvironment(result); } return result; } @@ -2268,7 +2284,7 @@ LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) { LOperand* context = UseAny(instr->context()); LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(context, string, index); - return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); + return AssignPointerMap(DefineAsRegister(result)); } @@ -2324,7 +2340,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) { } else { ASSERT(info()->IsStub()); CodeStubInterfaceDescriptor* descriptor = - info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); + info()->code_stub()->GetInterfaceDescriptor(); int index = static_cast<int>(instr->index()); Register reg = descriptor->GetParameterRegister(index); return DefineFixed(result, reg); @@ -2431,6 +2447,7 @@ LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { HEnvironment* outer = current_block_->last_environment(); + outer->set_ast_id(instr->ReturnId()); HConstant* undefined = graph()->GetConstantUndefined(); HEnvironment* inner = outer->CopyForInlining(instr->closure(), instr->arguments_count(), @@ -2491,7 +2508,9 @@ LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) { LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { LOperand* object = UseRegister(instr->object()); LOperand* index = UseRegister(instr->index()); - return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); + LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index); + LInstruction* result = DefineSameAsFirst(load); + return AssignPointerMap(result); } diff --git 
a/deps/v8/src/mips/lithium-mips.h b/deps/v8/src/mips/lithium-mips.h index ae59e57f2..14ac0a482 100644 --- a/deps/v8/src/mips/lithium-mips.h +++ b/deps/v8/src/mips/lithium-mips.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MIPS_LITHIUM_MIPS_H_ #define V8_MIPS_LITHIUM_MIPS_H_ @@ -97,6 +74,7 @@ class LCodeGen; V(DummyUse) \ V(FlooringDivByConstI) \ V(FlooringDivByPowerOf2I) \ + V(FlooringDivI) \ V(ForInCacheArray) \ V(ForInPrepareMap) \ V(FunctionLiteral) \ @@ -258,7 +236,9 @@ class LInstruction : public ZoneObject { // Interface to the register allocator and iterators. bool ClobbersTemps() const { return IsCall(); } bool ClobbersRegisters() const { return IsCall(); } - virtual bool ClobbersDoubleRegisters() const { return IsCall(); } + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const { + return IsCall(); + } // Interface to the register allocator and iterators. 
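The lithium-mips.h hunk above changes LInstruction::ClobbersDoubleRegisters from a parameterless virtual into one that receives the Isolate*, so every override has to adopt the new signature (LCallRuntime is updated accordingly further down in this file). A minimal stand-in sketch of the new shape, using simplified placeholder classes rather than the real Lithium types:

  #include <iostream>

  struct Isolate {};  // placeholder for v8::internal::Isolate

  struct LInstructionSketch {
    virtual ~LInstructionSketch() {}
    virtual bool IsCall() const { return false; }
    // New signature: the isolate is threaded through so overrides can
    // consult per-isolate state instead of globals.
    virtual bool ClobbersDoubleRegisters(Isolate* isolate) const {
      return IsCall();
    }
  };

  struct LCallRuntimeSketch : LInstructionSketch {
    explicit LCallRuntimeSketch(bool save_doubles)
        : save_doubles_(save_doubles) {}
    virtual bool IsCall() const { return true; }
    // Mirrors the updated LCallRuntime override: double registers are only
    // clobbered when the runtime call does not save FP registers.
    virtual bool ClobbersDoubleRegisters(Isolate* /* isolate */) const {
      return !save_doubles_;
    }
   private:
    bool save_doubles_;
  };

  int main() {
    Isolate isolate;
    LCallRuntimeSketch call(false);
    std::cout << call.ClobbersDoubleRegisters(&isolate) << std::endl;  // 1
  }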
bool IsMarkedAsCall() const { return IsCall(); } @@ -709,13 +689,13 @@ class LDivByConstI V8_FINAL : public LTemplateInstruction<1, 1, 0> { class LDivI V8_FINAL : public LTemplateInstruction<1, 2, 0> { public: - LDivI(LOperand* left, LOperand* right) { - inputs_[0] = left; - inputs_[1] = right; + LDivI(LOperand* dividend, LOperand* divisor) { + inputs_[0] = dividend; + inputs_[1] = divisor; } - LOperand* left() { return inputs_[0]; } - LOperand* right() { return inputs_[1]; } + LOperand* dividend() { return inputs_[0]; } + LOperand* divisor() { return inputs_[1]; } DECLARE_CONCRETE_INSTRUCTION(DivI, "div-i") DECLARE_HYDROGEN_ACCESSOR(BinaryOperation) @@ -761,6 +741,21 @@ class LFlooringDivByConstI V8_FINAL : public LTemplateInstruction<1, 1, 2> { }; +class LFlooringDivI V8_FINAL : public LTemplateInstruction<1, 2, 0> { + public: + LFlooringDivI(LOperand* dividend, LOperand* divisor) { + inputs_[0] = dividend; + inputs_[1] = divisor; + } + + LOperand* dividend() { return inputs_[0]; } + LOperand* divisor() { return inputs_[1]; } + + DECLARE_CONCRETE_INSTRUCTION(FlooringDivI, "flooring-div-i") + DECLARE_HYDROGEN_ACCESSOR(MathFloorOfDiv) +}; + + class LMulI V8_FINAL : public LTemplateInstruction<1, 2, 0> { public: LMulI(LOperand* left, LOperand* right) { @@ -1931,7 +1926,7 @@ class LCallRuntime V8_FINAL : public LTemplateInstruction<1, 1, 0> { DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime") DECLARE_HYDROGEN_ACCESSOR(CallRuntime) - virtual bool ClobbersDoubleRegisters() const V8_OVERRIDE { + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const V8_OVERRIDE { return save_doubles() == kDontSaveFPRegs; } @@ -2127,7 +2122,6 @@ class LStoreNamedField V8_FINAL : public LTemplateInstruction<0, 2, 1> { virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE; - Handle<Map> transition() const { return hydrogen()->transition_map(); } Representation representation() const { return hydrogen()->field_representation(); } @@ -2336,7 +2330,7 @@ class LCheckInstanceType V8_FINAL : public LTemplateInstruction<0, 1, 0> { class LCheckMaps V8_FINAL : public LTemplateInstruction<0, 1, 0> { public: - explicit LCheckMaps(LOperand* value) { + explicit LCheckMaps(LOperand* value = NULL) { inputs_[0] = value; } @@ -2655,6 +2649,8 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase { next_block_(NULL), allocator_(allocator) { } + Isolate* isolate() const { return graph_->isolate(); } + // Build the sequence for the graph. LPlatformChunk* Build(); @@ -2679,12 +2675,13 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase { LInstruction* DoMathClz32(HUnaryMathOperation* instr); LInstruction* DoDivByPowerOf2I(HDiv* instr); LInstruction* DoDivByConstI(HDiv* instr); - LInstruction* DoDivI(HBinaryOperation* instr); + LInstruction* DoDivI(HDiv* instr); LInstruction* DoModByPowerOf2I(HMod* instr); LInstruction* DoModByConstI(HMod* instr); LInstruction* DoModI(HMod* instr); LInstruction* DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr); LInstruction* DoFlooringDivByConstI(HMathFloorOfDiv* instr); + LInstruction* DoFlooringDivI(HMathFloorOfDiv* instr); private: enum Status { diff --git a/deps/v8/src/mips/macro-assembler-mips.cc b/deps/v8/src/mips/macro-assembler-mips.cc index 77c02e734..9291e20fa 100644 --- a/deps/v8/src/mips/macro-assembler-mips.cc +++ b/deps/v8/src/mips/macro-assembler-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
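The new LFlooringDivI above follows the LTemplateInstruction<R, I, T> convention: one result, two inputs (dividend and divisor, matching the renamed LDivI accessors), and zero temps. A simplified stand-in sketch of that shape, not the real Lithium classes:

  #include <cstdio>

  struct LOperand { int index; };

  template <int R, int I, int T>
  struct LTemplateInstructionSketch {
    // Size-1 fallback avoids zero-length arrays when a count is 0.
    LOperand* results_[R ? R : 1];
    LOperand* inputs_[I ? I : 1];
    LOperand* temps_[T ? T : 1];
  };

  // <1 result, 2 inputs, 0 temps>, the shape used by LFlooringDivI / LDivI.
  struct LFlooringDivISketch : LTemplateInstructionSketch<1, 2, 0> {
    LFlooringDivISketch(LOperand* dividend, LOperand* divisor) {
      inputs_[0] = dividend;
      inputs_[1] = divisor;
    }
    LOperand* dividend() { return inputs_[0]; }
    LOperand* divisor() { return inputs_[1]; }
  };

  int main() {
    LOperand a = {0}, b = {1};
    LFlooringDivISketch div(&a, &b);
    std::printf("%d %d\n", div.dividend()->index, div.divisor()->index);
  }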
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <limits.h> // For LONG_MIN, LONG_MAX. @@ -302,7 +279,8 @@ void MacroAssembler::RecordWrite(Register object, if (ra_status == kRAHasNotBeenSaved) { push(ra); } - RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode); + RecordWriteStub stub(isolate(), object, value, address, remembered_set_action, + fp_mode); CallStub(&stub); if (ra_status == kRAHasNotBeenSaved) { pop(ra); @@ -352,7 +330,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests. } push(ra); StoreBufferOverflowStub store_buffer_overflow = - StoreBufferOverflowStub(fp_mode); + StoreBufferOverflowStub(isolate(), fp_mode); CallStub(&store_buffer_overflow); pop(ra); bind(&done); @@ -1456,7 +1434,7 @@ void MacroAssembler::TruncateDoubleToI(Register result, Subu(sp, sp, Operand(kDoubleSize)); // Put input on stack. sdc1(double_input, MemOperand(sp, 0)); - DoubleToIStub stub(sp, result, 0, true, true); + DoubleToIStub stub(isolate(), sp, result, 0, true, true); CallStub(&stub); Addu(sp, sp, Operand(kDoubleSize)); @@ -1477,7 +1455,8 @@ void MacroAssembler::TruncateHeapNumberToI(Register result, Register object) { // If we fell through then inline version didn't succeed - call stub instead. 
push(ra); - DoubleToIStub stub(object, + DoubleToIStub stub(isolate(), + object, result, HeapNumber::kValueOffset - kHeapObjectTag, true, @@ -2091,7 +2070,7 @@ void MacroAssembler::BranchShort(Label* L, Condition cond, Register rs, case Uless_equal: if (rt.imm32_ == 0) { offset = shifted_branch_offset(L, false); - b(offset); + beq(rs, zero_reg, offset); } else { ASSERT(!scratch.is(rs)); r2 = scratch; @@ -2686,18 +2665,14 @@ void MacroAssembler::Push(Handle<Object> handle) { } -#ifdef ENABLE_DEBUGGER_SUPPORT - void MacroAssembler::DebugBreak() { PrepareCEntryArgs(0); PrepareCEntryFunction(ExternalReference(Runtime::kDebugBreak, isolate())); - CEntryStub ces(1); + CEntryStub ces(isolate(), 1); ASSERT(AllowThisStubCall(&ces)); - Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK); + Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); } -#endif // ENABLE_DEBUGGER_SUPPORT - // --------------------------------------------------------------------------- // Exception handling. @@ -3313,13 +3288,24 @@ void MacroAssembler::CopyBytes(Register src, // TODO(kalmard) check if this can be optimized to use sw in most cases. // Can't use unaligned access - copy byte by byte. - sb(scratch, MemOperand(dst, 0)); - srl(scratch, scratch, 8); - sb(scratch, MemOperand(dst, 1)); - srl(scratch, scratch, 8); - sb(scratch, MemOperand(dst, 2)); - srl(scratch, scratch, 8); - sb(scratch, MemOperand(dst, 3)); + if (kArchEndian == kLittle) { + sb(scratch, MemOperand(dst, 0)); + srl(scratch, scratch, 8); + sb(scratch, MemOperand(dst, 1)); + srl(scratch, scratch, 8); + sb(scratch, MemOperand(dst, 2)); + srl(scratch, scratch, 8); + sb(scratch, MemOperand(dst, 3)); + } else { + sb(scratch, MemOperand(dst, 3)); + srl(scratch, scratch, 8); + sb(scratch, MemOperand(dst, 2)); + srl(scratch, scratch, 8); + sb(scratch, MemOperand(dst, 1)); + srl(scratch, scratch, 8); + sb(scratch, MemOperand(dst, 0)); + } + Addu(dst, dst, 4); Subu(length, length, Operand(kPointerSize)); @@ -3424,11 +3410,12 @@ void MacroAssembler::StoreNumberToDoubleElements(Register value_reg, bind(&have_double_value); sll(scratch1, key_reg, kDoubleSizeLog2 - kSmiTagSize); Addu(scratch1, scratch1, elements_reg); - sw(mantissa_reg, FieldMemOperand( - scratch1, FixedDoubleArray::kHeaderSize - elements_offset)); - uint32_t offset = FixedDoubleArray::kHeaderSize - elements_offset + - sizeof(kHoleNanLower32); - sw(exponent_reg, FieldMemOperand(scratch1, offset)); + sw(mantissa_reg, + FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize - elements_offset + + kHoleNanLower32Offset)); + sw(exponent_reg, + FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize - elements_offset + + kHoleNanUpper32Offset)); jmp(&done); bind(&maybe_nan); @@ -3526,7 +3513,11 @@ void MacroAssembler::CheckMap(Register obj, void MacroAssembler::MovFromFloatResult(DoubleRegister dst) { if (IsMipsSoftFloatABI) { - Move(dst, v0, v1); + if (kArchEndian == kLittle) { + Move(dst, v0, v1); + } else { + Move(dst, v1, v0); + } } else { Move(dst, f0); // Reg f0 is o32 ABI FP return value. } @@ -3535,7 +3526,11 @@ void MacroAssembler::MovFromFloatResult(DoubleRegister dst) { void MacroAssembler::MovFromFloatParameter(DoubleRegister dst) { if (IsMipsSoftFloatABI) { - Move(dst, a0, a1); + if (kArchEndian == kLittle) { + Move(dst, a0, a1); + } else { + Move(dst, a1, a0); + } } else { Move(dst, f12); // Reg f12 is o32 ABI FP first argument value. 
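The CopyBytes and soft-float ABI hunks above make the byte stores and the register pairing endian-aware via kArchEndian, adding the big-endian cases alongside the existing little-endian behaviour. A stand-alone sketch (plain C++, illustrative names only) of the byte-by-byte word store that the sb/srl sequence implements for each endianness:

  #include <cstdint>
  #include <cstdio>

  enum Endianness { kLittle, kBig };

  // Mirrors the sb/srl pattern: the low byte lands at offset 0 on
  // little-endian targets and at offset 3 on big-endian targets.
  void StoreWordBytewise(uint8_t* dst, uint32_t scratch, Endianness arch) {
    if (arch == kLittle) {
      for (int i = 0; i < 4; ++i) { dst[i] = scratch & 0xff; scratch >>= 8; }
    } else {
      for (int i = 3; i >= 0; --i) { dst[i] = scratch & 0xff; scratch >>= 8; }
    }
  }

  int main() {
    uint8_t little[4], big[4];
    StoreWordBytewise(little, 0x11223344u, kLittle);
    StoreWordBytewise(big, 0x11223344u, kBig);
    std::printf("%02x %02x %02x %02x | %02x %02x %02x %02x\n",
                little[0], little[1], little[2], little[3],
                big[0], big[1], big[2], big[3]);
  }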
} @@ -3546,7 +3541,11 @@ void MacroAssembler::MovToFloatParameter(DoubleRegister src) { if (!IsMipsSoftFloatABI) { Move(f12, src); } else { - Move(a0, a1, src); + if (kArchEndian == kLittle) { + Move(a0, a1, src); + } else { + Move(a1, a0, src); + } } } @@ -3555,7 +3554,11 @@ void MacroAssembler::MovToFloatResult(DoubleRegister src) { if (!IsMipsSoftFloatABI) { Move(f0, src); } else { - Move(v0, v1, src); + if (kArchEndian == kLittle) { + Move(v0, v1, src); + } else { + Move(v1, v0, src); + } } } @@ -3572,8 +3575,13 @@ void MacroAssembler::MovToFloatParameters(DoubleRegister src1, Move(f14, src2); } } else { - Move(a0, a1, src1); - Move(a2, a3, src2); + if (kArchEndian == kLittle) { + Move(a0, a1, src1); + Move(a2, a3, src2); + } else { + Move(a1, a0, src1); + Move(a3, a2, src2); + } } } @@ -3859,7 +3867,7 @@ void MacroAssembler::CallStub(CodeStub* stub, const Operand& r2, BranchDelaySlot bd) { ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs. - Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id, + Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond, r1, r2, bd); } @@ -3869,7 +3877,7 @@ void MacroAssembler::TailCallStub(CodeStub* stub, Register r1, const Operand& r2, BranchDelaySlot bd) { - Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, cond, r1, r2, bd); + Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd); } @@ -3898,10 +3906,7 @@ void MacroAssembler::CallApiFunctionAndReturn( Label profiler_disabled; Label end_profiler_check; - bool* is_profiling_flag = - isolate()->cpu_profiler()->is_profiling_address(); - STATIC_ASSERT(sizeof(*is_profiling_flag) == 1); - li(t9, reinterpret_cast<int32_t>(is_profiling_flag)); + li(t9, Operand(ExternalReference::is_profiling_address(isolate()))); lb(t9, MemOperand(t9, 0)); Branch(&profiler_disabled, eq, t9, Operand(zero_reg)); @@ -3933,7 +3938,7 @@ void MacroAssembler::CallApiFunctionAndReturn( // Native call returns to the DirectCEntry stub which redirects to the // return address pushed on stack (could have moved after GC). // DirectCEntry stub itself is generated early and never moves. - DirectCEntryStub stub; + DirectCEntryStub stub(isolate()); stub.GenerateCall(this, t9); if (FLAG_log_timer_events) { @@ -4010,14 +4015,6 @@ bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { } -void MacroAssembler::IllegalOperation(int num_arguments) { - if (num_arguments > 0) { - addiu(sp, sp, num_arguments * kPointerSize); - } - LoadRoot(v0, Heap::kUndefinedValueRootIndex); -} - - void MacroAssembler::IndexFromHash(Register hash, Register index) { // If the hash field contains an array index pick it out. The assert checks @@ -4172,10 +4169,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // If the expected number of arguments of the runtime function is // constant, we check that the actual number of arguments match the // expectation. - if (f->nargs >= 0 && f->nargs != num_arguments) { - IllegalOperation(num_arguments); - return; - } + CHECK(f->nargs < 0 || f->nargs == num_arguments); // TODO(1236192): Most runtime routines don't need the number of // arguments passed in because it is constant. At some point we @@ -4183,7 +4177,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // smarter. 
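A pattern that recurs throughout this file (RecordWriteStub, StoreBufferOverflowStub, DoubleToIStub, CEntryStub, DirectCEntryStub): code stubs are now constructed with the Isolate*, and GetCode() no longer takes one. A minimal stand-in sketch of the before/after call shape, with simplified placeholder types rather than the real V8 classes:

  #include <cassert>

  struct Isolate {};
  struct Code {};

  class CodeStubSketch {
   public:
    explicit CodeStubSketch(Isolate* isolate) : isolate_(isolate) {}
    // Old shape was roughly GetCode(Isolate*); the new stub already
    // remembers its isolate, so GetCode() takes no arguments.
    Code* GetCode() { static Code code; return &code; }
    Isolate* isolate() const { return isolate_; }
   private:
    Isolate* isolate_;
  };

  class CEntryStubSketch : public CodeStubSketch {
   public:
    CEntryStubSketch(Isolate* isolate, int result_size)
        : CodeStubSketch(isolate), result_size_(result_size) {}
   private:
    int result_size_;
  };

  int main() {
    Isolate isolate;
    CEntryStubSketch stub(&isolate, 1);   // was: CEntryStub stub(1);
    Code* code = stub.GetCode();          // was: stub.GetCode(isolate);
    assert(code != nullptr);
  }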
PrepareCEntryArgs(num_arguments); PrepareCEntryFunction(ExternalReference(f, isolate())); - CEntryStub stub(1, save_doubles); + CEntryStub stub(isolate(), 1, save_doubles); CallStub(&stub); } @@ -4194,7 +4188,7 @@ void MacroAssembler::CallExternalReference(const ExternalReference& ext, PrepareCEntryArgs(num_arguments); PrepareCEntryFunction(ext); - CEntryStub stub(1); + CEntryStub stub(isolate(), 1); CallStub(&stub, TypeFeedbackId::None(), al, zero_reg, Operand(zero_reg), bd); } @@ -4223,8 +4217,8 @@ void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin, BranchDelaySlot bd) { PrepareCEntryFunction(builtin); - CEntryStub stub(1); - Jump(stub.GetCode(isolate()), + CEntryStub stub(isolate(), 1); + Jump(stub.GetCode(), RelocInfo::CODE_TARGET, al, zero_reg, @@ -4466,7 +4460,7 @@ void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); } else { PredictableCodeSizeScope predictible_code_size_scope( - this, kNoCodeAgeSequenceLength * Assembler::kInstrSize); + this, kNoCodeAgeSequenceLength); // The following three instructions must remain together and unmodified // for code aging to work properly. if (isolate()->IsCodePreAgingActive()) { @@ -5670,10 +5664,13 @@ bool AreAliased(Register r1, Register r2, Register r3, Register r4) { } -CodePatcher::CodePatcher(byte* address, int instructions) +CodePatcher::CodePatcher(byte* address, + int instructions, + FlushICache flush_cache) : address_(address), size_(instructions * Assembler::kInstrSize), - masm_(NULL, address, size_ + Assembler::kGap) { + masm_(NULL, address, size_ + Assembler::kGap), + flush_cache_(flush_cache) { // Create a new macro assembler pointing to the address of the code to patch. // The size is adjusted with kGap on order for the assembler to generate size // bytes of instructions without failing with buffer size constraints. @@ -5683,7 +5680,9 @@ CodePatcher::CodePatcher(byte* address, int instructions) CodePatcher::~CodePatcher() { // Indicate that code has changed. - CPU::FlushICache(address_, size_); + if (flush_cache_ == FLUSH) { + CPU::FlushICache(address_, size_); + } // Check that the code was patched as expected. ASSERT(masm_.pc_ == address_ + size_); diff --git a/deps/v8/src/mips/macro-assembler-mips.h b/deps/v8/src/mips/macro-assembler-mips.h index db9f1a2c7..774449cab 100644 --- a/deps/v8/src/mips/macro-assembler-mips.h +++ b/deps/v8/src/mips/macro-assembler-mips.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MIPS_MACRO_ASSEMBLER_MIPS_H_ #define V8_MIPS_MACRO_ASSEMBLER_MIPS_H_ @@ -932,13 +909,10 @@ class MacroAssembler: public Assembler { Register scratch, Label* fail); -#ifdef ENABLE_DEBUGGER_SUPPORT // ------------------------------------------------------------------------- // Debugger Support. void DebugBreak(); -#endif - // ------------------------------------------------------------------------- // Exception handling. @@ -1074,10 +1048,6 @@ class MacroAssembler: public Assembler { Handle<Code> success, SmiCheckType smi_check_type); - // Generates code for reporting that an illegal operation has - // occurred. - void IllegalOperation(int num_arguments); - // Load and check the instance type of an object for being a string. // Loads the type into the second argument register. @@ -1644,7 +1614,14 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT // an assertion to fail. class CodePatcher { public: - CodePatcher(byte* address, int instructions); + enum FlushICache { + FLUSH, + DONT_FLUSH + }; + + CodePatcher(byte* address, + int instructions, + FlushICache flush_cache = FLUSH); virtual ~CodePatcher(); // Macro assembler to emit code. @@ -1664,6 +1641,7 @@ class CodePatcher { byte* address_; // The address of the code being patched. int size_; // Number of bytes of the expected patch size. MacroAssembler masm_; // Macro assembler used to generate the code. + FlushICache flush_cache_; // Whether to flush the I cache after patching. }; diff --git a/deps/v8/src/mips/regexp-macro-assembler-mips.cc b/deps/v8/src/mips/regexp-macro-assembler-mips.cc index 49dec3c02..7c8fde900 100644 --- a/deps/v8/src/mips/regexp-macro-assembler-mips.cc +++ b/deps/v8/src/mips/regexp-macro-assembler-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
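The CodePatcher change above adds an optional FlushICache argument, defaulting to FLUSH, so callers that patch several sites in a row can skip the per-patch instruction-cache flush and flush once themselves. A self-contained sketch of that pattern with placeholder types (the real class wraps a MacroAssembler and calls CPU::FlushICache):

  #include <cstddef>
  #include <cstdio>

  typedef unsigned char byte;

  class CodePatcherSketch {
   public:
    enum FlushICache { FLUSH, DONT_FLUSH };

    CodePatcherSketch(byte* address, int instructions,
                      FlushICache flush_cache = FLUSH)
        : address_(address),
          size_(instructions * 4),  // one MIPS instruction is 4 bytes
          flush_cache_(flush_cache) {}

    ~CodePatcherSketch() {
      // Only flush when requested; DONT_FLUSH leaves that to the caller.
      if (flush_cache_ == FLUSH) FlushInstructionCache(address_, size_);
    }

   private:
    static void FlushInstructionCache(byte* address, size_t size) {
      // Placeholder for CPU::FlushICache(address, size).
      std::printf("flush %zu bytes at %p\n", size,
                  static_cast<void*>(address));
    }

    byte* address_;
    size_t size_;
    FlushICache flush_cache_;
  };

  int main() {
    byte buffer[16] = {0};
    { CodePatcherSketch patcher(buffer, 2); }                           // flushes
    { CodePatcherSketch patcher(buffer, 2, CodePatcherSketch::DONT_FLUSH); }  // skips
  }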
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -1096,7 +1073,7 @@ void RegExpMacroAssemblerMIPS::CallCheckStackGuardState(Register scratch) { ExternalReference stack_guard_check = ExternalReference::re_check_stack_guard_state(masm_->isolate()); __ li(t9, Operand(stack_guard_check)); - DirectCEntryStub stub; + DirectCEntryStub stub(isolate()); stub.GenerateCall(masm_, t9); // DirectCEntryStub allocated space for the C argument slots so we have to @@ -1153,7 +1130,7 @@ int RegExpMacroAssemblerMIPS::CheckStackGuardState(Address* return_address, ASSERT(*return_address <= re_code->instruction_start() + re_code->instruction_size()); - MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate); + Object* result = Execution::HandleStackGuardInterrupt(isolate); if (*code_handle != re_code) { // Return address no longer valid. int delta = code_handle->address() - re_code->address(); diff --git a/deps/v8/src/mips/regexp-macro-assembler-mips.h b/deps/v8/src/mips/regexp-macro-assembler-mips.h index 063582c64..f0aba07dc 100644 --- a/deps/v8/src/mips/regexp-macro-assembler-mips.h +++ b/deps/v8/src/mips/regexp-macro-assembler-mips.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MIPS_REGEXP_MACRO_ASSEMBLER_MIPS_H_ diff --git a/deps/v8/src/mips/simulator-mips.cc b/deps/v8/src/mips/simulator-mips.cc index d26499bbc..51f679bdc 100644 --- a/deps/v8/src/mips/simulator-mips.cc +++ b/deps/v8/src/mips/simulator-mips.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <limits.h> #include <stdarg.h> diff --git a/deps/v8/src/mips/simulator-mips.h b/deps/v8/src/mips/simulator-mips.h index 92a0a87d2..feeb7bcfc 100644 --- a/deps/v8/src/mips/simulator-mips.h +++ b/deps/v8/src/mips/simulator-mips.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Declares a Simulator for MIPS instructions if we are not generating a native diff --git a/deps/v8/src/mips/stub-cache-mips.cc b/deps/v8/src/mips/stub-cache-mips.cc index 153a81682..abccc9496 100644 --- a/deps/v8/src/mips/stub-cache-mips.cc +++ b/deps/v8/src/mips/stub-cache-mips.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -416,6 +393,24 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ JumpIfNotSmi(value_reg, miss_label); } else if (representation.IsHeapObject()) { __ JumpIfSmi(value_reg, miss_label); + HeapType* field_type = descriptors->GetFieldType(descriptor); + HeapType::Iterator<Map> it = field_type->Classes(); + Handle<Map> current; + if (!it.Done()) { + __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); + Label do_store; + while (true) { + // Do the CompareMap() directly within the Branch() functions. + current = it.Current(); + it.Advance(); + if (it.Done()) { + __ Branch(miss_label, ne, scratch1, Operand(current)); + break; + } + __ Branch(&do_store, eq, scratch1, Operand(current)); + } + __ bind(&do_store); + } } else if (representation.IsDouble()) { Label do_store, heap_number; __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); @@ -579,6 +574,24 @@ void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm, __ JumpIfNotSmi(value_reg, miss_label); } else if (representation.IsHeapObject()) { __ JumpIfSmi(value_reg, miss_label); + HeapType* field_type = lookup->GetFieldType(); + HeapType::Iterator<Map> it = field_type->Classes(); + if (!it.Done()) { + __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); + Label do_store; + Handle<Map> current; + while (true) { + // Do the CompareMap() directly within the Branch() functions. + current = it.Current(); + it.Advance(); + if (it.Done()) { + __ Branch(miss_label, ne, scratch1, Operand(current)); + break; + } + __ Branch(&do_store, eq, scratch1, Operand(current)); + } + __ bind(&do_store); + } } else if (representation.IsDouble()) { // Load the double storage. if (index < 0) { @@ -789,7 +802,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm, __ li(api_function_address, Operand(ref)); // Jump to stub. 
- CallApiFunctionStub stub(is_store, call_data_undefined, argc); + CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc); __ TailCallStub(&stub); } @@ -824,7 +837,9 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type, int depth = 0; Handle<JSObject> current = Handle<JSObject>::null(); - if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant()); + if (type->IsConstant()) { + current = Handle<JSObject>::cast(type->AsConstant()->Value()); + } Handle<JSObject> prototype = Handle<JSObject>::null(); Handle<Map> current_map = receiver_map; Handle<Map> holder_map(holder->map()); @@ -847,7 +862,7 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type, name = factory()->InternalizeString(Handle<String>::cast(name)); } ASSERT(current.is_null() || - current->property_dictionary()->FindEntry(*name) == + current->property_dictionary()->FindEntry(name) == NameDictionary::kNotFound); GenerateDictionaryNegativeLookup(masm(), miss, reg, name, @@ -986,15 +1001,17 @@ void LoadStubCompiler::GenerateLoadField(Register reg, Representation representation) { if (!reg.is(receiver())) __ mov(receiver(), reg); if (kind() == Code::LOAD_IC) { - LoadFieldStub stub(field.is_inobject(holder), + LoadFieldStub stub(isolate(), + field.is_inobject(holder), field.translate(holder), representation); - GenerateTailCall(masm(), stub.GetCode(isolate())); + GenerateTailCall(masm(), stub.GetCode()); } else { - KeyedLoadFieldStub stub(field.is_inobject(holder), + KeyedLoadFieldStub stub(isolate(), + field.is_inobject(holder), field.translate(holder), representation); - GenerateTailCall(masm(), stub.GetCode(isolate())); + GenerateTailCall(masm(), stub.GetCode()); } } @@ -1051,7 +1068,7 @@ void LoadStubCompiler::GenerateLoadCallback( ExternalReference ref = ExternalReference(&fun, type, isolate()); __ li(getter_address_reg, Operand(ref)); - CallApiGetterStub stub; + CallApiGetterStub stub(isolate()); __ TailCallStub(&stub); } @@ -1141,17 +1158,6 @@ void LoadStubCompiler::GenerateLoadInterceptor( } -void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) { - Label success; - // Check that the object is a boolean. - __ LoadRoot(at, Heap::kTrueValueRootIndex); - __ Branch(&success, eq, object, Operand(at)); - __ LoadRoot(at, Heap::kFalseValueRootIndex); - __ Branch(miss, ne, object, Operand(at)); - __ bind(&success); -} - - Handle<Code> StoreStubCompiler::CompileStoreCallback( Handle<JSObject> object, Handle<JSObject> holder, diff --git a/deps/v8/src/mirror-debugger.js b/deps/v8/src/mirror-debugger.js index d413b090b..fde3f10f3 100644 --- a/deps/v8/src/mirror-debugger.js +++ b/deps/v8/src/mirror-debugger.js @@ -1,29 +1,6 @@ // Copyright 2006-2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
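The GenerateStoreTransition and GenerateStoreField hunks above add a check for heap-object fields: the value's map is compared against every map admitted by the field's HeapType, branching to the miss label only when the last candidate also fails, and skipping the check entirely when the type lists no classes. The same control flow expressed as plain C++ (stand-in types, not the real HeapType iterator):

  #include <cstdio>
  #include <vector>

  typedef int Map;  // placeholder for Handle<Map>

  // Returns true when value_map is admitted by the field type. The branch
  // structure mirrors the generated code: hit => do_store, final miss =>
  // miss_label, otherwise try the next map.
  bool CheckHeapObjectField(Map value_map, const std::vector<Map>& field_maps) {
    for (size_t i = 0; i < field_maps.size(); ++i) {
      bool last = (i + 1 == field_maps.size());
      if (value_map == field_maps[i]) return true;  // Branch(&do_store, eq, ...)
      if (last) return false;                       // Branch(miss_label, ne, ...)
    }
    return true;  // empty type: no check emitted, store unconditionally
  }

  int main() {
    std::vector<Map> maps;
    maps.push_back(1); maps.push_back(2); maps.push_back(3);
    std::printf("%d %d\n",
                CheckHeapObjectField(2, maps),   // 1
                CheckHeapObjectField(7, maps));  // 0
  }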
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Handle id counters. var next_handle_ = 0; @@ -42,6 +19,18 @@ function ClearMirrorCache() { } +// Wrapper to check whether an object is a Promise. The call may not work +// if promises are not enabled. +// TODO(yangguo): remove this wrapper once promises are enabled by default. +function ObjectIsPromise(value) { + try { + return %IsPromise(value); + } catch (e) { + return false; + } +} + + /** * Returns the mirror for a specified value or object. * @@ -90,6 +79,8 @@ function MakeMirror(value, opt_transient) { mirror = new ErrorMirror(value); } else if (IS_SCRIPT(value)) { mirror = new ScriptMirror(value); + } else if (ObjectIsPromise(value)) { + mirror = new PromiseMirror(value); } else { mirror = new ObjectMirror(value, OBJECT_TYPE, opt_transient); } @@ -159,6 +150,7 @@ var FRAME_TYPE = 'frame'; var SCRIPT_TYPE = 'script'; var CONTEXT_TYPE = 'context'; var SCOPE_TYPE = 'scope'; +var PROMISE_TYPE = 'promise'; // Maximum length when sending strings through the JSON protocol. var kMaxProtocolStringLength = 80; @@ -212,6 +204,7 @@ var ScopeType = { Global: 0, // - DateMirror // - RegExpMirror // - ErrorMirror +// - PromiseMirror // - PropertyMirror // - InternalPropertyMirror // - FrameMirror @@ -351,6 +344,15 @@ Mirror.prototype.isError = function() { /** + * Check whether the mirror reflects a promise. + * @returns {boolean} True if the mirror reflects a promise + */ +Mirror.prototype.isPromise = function() { + return this instanceof PromiseMirror; +}; + + +/** * Check whether the mirror reflects a property. * @returns {boolean} True if the mirror reflects a property */ @@ -637,9 +639,9 @@ ObjectMirror.prototype.propertyNames = function(kind, limit) { // Find all the named properties. if (kind & PropertyKind.Named) { - // Get all the local property names. + // Get all the local property names except for private symbols. propertyNames = - %GetLocalPropertyNames(this.value_, PROPERTY_ATTRIBUTES_NONE); + %GetLocalPropertyNames(this.value_, PROPERTY_ATTRIBUTES_PRIVATE_SYMBOL); total += propertyNames.length; // Get names for named interceptor properties if any. @@ -1172,6 +1174,31 @@ ErrorMirror.prototype.toText = function() { /** + * Mirror object for a Promise object. 
+ * @param {Object} data The Promise object + * @constructor + * @extends Mirror + */ +function PromiseMirror(value) { + %_CallFunction(this, value, PROMISE_TYPE, ObjectMirror); +} +inherits(PromiseMirror, ObjectMirror); + + +PromiseMirror.prototype.status = function() { + var status = builtins.GetPromiseStatus(this.value_); + if (status == 0) return "pending"; + if (status == 1) return "resolved"; + return "rejected"; +}; + + +PromiseMirror.prototype.promiseValue = function() { + return builtins.GetPromiseValue(this.value_); +}; + + +/** * Base mirror object for properties. * @param {ObjectMirror} mirror The mirror object having this property * @param {string} name The name of the property @@ -2350,6 +2377,7 @@ JSONProtocolSerializer.prototype.serialize_ = function(mirror, reference, case FUNCTION_TYPE: case ERROR_TYPE: case REGEXP_TYPE: + case PROMISE_TYPE: // Add object representation. this.serializeObject_(mirror, content, details); break; @@ -2452,7 +2480,6 @@ JSONProtocolSerializer.prototype.serializeObject_ = function(mirror, content, content.indexedInterceptor = true; } - // Add function specific properties. if (mirror.isFunction()) { // Add function specific properties. content.name = mirror.name(); @@ -2480,12 +2507,17 @@ JSONProtocolSerializer.prototype.serializeObject_ = function(mirror, content, } } - // Add date specific properties. if (mirror.isDate()) { // Add date specific properties. content.value = mirror.value(); } + if (mirror.isPromise()) { + // Add promise specific properties. + content.status = mirror.status(); + content.promiseValue = mirror.promiseValue(); + } + // Add actual properties - named properties followed by indexed properties. var propertyNames = mirror.propertyNames(PropertyKind.Named); var propertyIndexes = mirror.propertyNames(PropertyKind.Indexed); diff --git a/deps/v8/src/misc-intrinsics.h b/deps/v8/src/misc-intrinsics.h index 5393de2c2..a8582bbe1 100644 --- a/deps/v8/src/misc-intrinsics.h +++ b/deps/v8/src/misc-intrinsics.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MISC_INTRINSICS_H_ #define V8_MISC_INTRINSICS_H_ diff --git a/deps/v8/src/mksnapshot.cc b/deps/v8/src/mksnapshot.cc index 07b057549..a8bf871f1 100644 --- a/deps/v8/src/mksnapshot.cc +++ b/deps/v8/src/mksnapshot.cc @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include <errno.h> #include <stdio.h> @@ -41,6 +18,10 @@ #include "serialize.h" #include "list.h" +#if V8_TARGET_ARCH_ARM +#include "arm/assembler-arm-inl.h" +#endif + using namespace v8; @@ -52,148 +33,173 @@ class Compressor { }; -class PartialSnapshotSink : public i::SnapshotByteSink { +class ListSnapshotSink : public i::SnapshotByteSink { + public: + explicit ListSnapshotSink(i::List<char>* data) : data_(data) { } + virtual ~ListSnapshotSink() {} + virtual void Put(int byte, const char* description) { data_->Add(byte); } + virtual int Position() { return data_->length(); } + private: + i::List<char>* data_; +}; + + +class SnapshotWriter { public: - PartialSnapshotSink() : data_(), raw_size_(-1) { } - virtual ~PartialSnapshotSink() { data_.Free(); } - virtual void Put(int byte, const char* description) { - data_.Add(byte); + explicit SnapshotWriter(const char* snapshot_file) + : fp_(GetFileDescriptorOrDie(snapshot_file)) + , raw_file_(NULL) + , raw_context_file_(NULL) + , compressor_(NULL) + , omit_(false) { } - virtual int Position() { return data_.length(); } - void Print(FILE* fp) { - int length = Position(); - for (int j = 0; j < length; j++) { - if ((j & 0x1f) == 0x1f) { - fprintf(fp, "\n"); - } - if (j != 0) { - fprintf(fp, ","); - } - fprintf(fp, "%u", static_cast<unsigned char>(at(j))); - } + + ~SnapshotWriter() { + fclose(fp_); + if (raw_file_) fclose(raw_file_); + if (raw_context_file_) fclose(raw_context_file_); } - char at(int i) { return data_[i]; } - bool Compress(Compressor* compressor) { - ASSERT_EQ(-1, raw_size_); - raw_size_ = data_.length(); - if (!compressor->Compress(data_.ToVector())) return false; - data_.Clear(); - data_.AddAll(*compressor->output()); - return true; + + void SetCompressor(Compressor* compressor) { + compressor_ = compressor; } - int raw_size() { return raw_size_; } - private: - i::List<char> data_; - int raw_size_; -}; + void SetOmit(bool omit) { + omit_ = omit; + } + void SetRawFiles(const char* raw_file, const char* raw_context_file) { + raw_file_ = GetFileDescriptorOrDie(raw_file); + raw_context_file_ = GetFileDescriptorOrDie(raw_context_file); + } -class CppByteSink : public PartialSnapshotSink { - public: - explicit CppByteSink(const char* snapshot_file) { - fp_ = i::OS::FOpen(snapshot_file, "wb"); - if (fp_ == NULL) { - i::PrintF("Unable to write to snapshot file \"%s\"\n", snapshot_file); - exit(1); - } + void WriteSnapshot(const i::List<char>& snapshot_data, + const i::Serializer& serializer, + const i::List<char>& context_snapshot_data, + const i::Serializer& context_serializer) const { + WriteFilePrefix(); + WriteData("", snapshot_data, raw_file_); + WriteData("context_", context_snapshot_data, raw_context_file_); + WriteMeta("context_", context_serializer); + WriteMeta("", serializer); + WriteFileSuffix(); + } + + private: + void WriteFilePrefix() const { fprintf(fp_, "// Autogenerated snapshot file. 
Do not edit.\n\n"); fprintf(fp_, "#include \"v8.h\"\n"); fprintf(fp_, "#include \"platform.h\"\n\n"); fprintf(fp_, "#include \"snapshot.h\"\n\n"); - fprintf(fp_, "namespace v8 {\nnamespace internal {\n\n"); - fprintf(fp_, "const byte Snapshot::data_[] = {"); + fprintf(fp_, "namespace v8 {\n"); + fprintf(fp_, "namespace internal {\n\n"); } - virtual ~CppByteSink() { - fprintf(fp_, "const int Snapshot::size_ = %d;\n", Position()); -#ifdef COMPRESS_STARTUP_DATA_BZ2 - fprintf(fp_, "const byte* Snapshot::raw_data_ = NULL;\n"); - fprintf(fp_, - "const int Snapshot::raw_size_ = %d;\n\n", - raw_size()); -#else - fprintf(fp_, - "const byte* Snapshot::raw_data_ = Snapshot::data_;\n"); - fprintf(fp_, - "const int Snapshot::raw_size_ = Snapshot::size_;\n\n"); -#endif - fprintf(fp_, "} } // namespace v8::internal\n"); - fclose(fp_); + void WriteFileSuffix() const { + fprintf(fp_, "} // namespace internal\n"); + fprintf(fp_, "} // namespace v8\n"); + } + + void WriteData(const char* prefix, + const i::List<char>& source_data, + FILE* raw_file) const { + const i::List <char>* data_to_be_written = NULL; + i::List<char> compressed_data; + if (!compressor_) { + data_to_be_written = &source_data; + } else if (compressor_->Compress(source_data.ToVector())) { + compressed_data.AddAll(*compressor_->output()); + data_to_be_written = &compressed_data; + } else { + i::PrintF("Compression failed. Aborting.\n"); + exit(1); + } + + ASSERT(data_to_be_written); + MaybeWriteRawFile(data_to_be_written, raw_file); + WriteData(prefix, source_data, data_to_be_written); } - void WriteSpaceUsed( - const char* prefix, - int new_space_used, - int pointer_space_used, - int data_space_used, - int code_space_used, - int map_space_used, - int cell_space_used, - int property_cell_space_used) { - fprintf(fp_, - "const int Snapshot::%snew_space_used_ = %d;\n", - prefix, - new_space_used); - fprintf(fp_, - "const int Snapshot::%spointer_space_used_ = %d;\n", - prefix, - pointer_space_used); - fprintf(fp_, - "const int Snapshot::%sdata_space_used_ = %d;\n", - prefix, - data_space_used); - fprintf(fp_, - "const int Snapshot::%scode_space_used_ = %d;\n", - prefix, - code_space_used); - fprintf(fp_, - "const int Snapshot::%smap_space_used_ = %d;\n", - prefix, - map_space_used); - fprintf(fp_, - "const int Snapshot::%scell_space_used_ = %d;\n", - prefix, - cell_space_used); - fprintf(fp_, - "const int Snapshot::%sproperty_cell_space_used_ = %d;\n", - prefix, - property_cell_space_used); + void MaybeWriteRawFile(const i::List<char>* data, FILE* raw_file) const { + if (!data || !raw_file) + return; + + // Sanity check, whether i::List iterators truly return pointers to an + // internal array. + ASSERT(data->end() - data->begin() == data->length()); + + size_t written = fwrite(data->begin(), 1, data->length(), raw_file); + if (written != (size_t)data->length()) { + i::PrintF("Writing raw file failed.. 
Aborting.\n"); + exit(1); + } } - void WritePartialSnapshot() { - int length = partial_sink_.Position(); - fprintf(fp_, "};\n\n"); - fprintf(fp_, "const int Snapshot::context_size_ = %d;\n", length); -#ifdef COMPRESS_STARTUP_DATA_BZ2 - fprintf(fp_, - "const int Snapshot::context_raw_size_ = %d;\n", - partial_sink_.raw_size()); -#else - fprintf(fp_, - "const int Snapshot::context_raw_size_ = " - "Snapshot::context_size_;\n"); -#endif - fprintf(fp_, "const byte Snapshot::context_data_[] = {\n"); - partial_sink_.Print(fp_); - fprintf(fp_, "};\n\n"); -#ifdef COMPRESS_STARTUP_DATA_BZ2 - fprintf(fp_, "const byte* Snapshot::context_raw_data_ = NULL;\n"); -#else - fprintf(fp_, "const byte* Snapshot::context_raw_data_ =" - " Snapshot::context_data_;\n"); -#endif + void WriteData(const char* prefix, + const i::List<char>& source_data, + const i::List<char>* data_to_be_written) const { + fprintf(fp_, "const byte Snapshot::%sdata_[] = {\n", prefix); + if (!omit_) + WriteSnapshotData(data_to_be_written); + fprintf(fp_, "};\n"); + fprintf(fp_, "const int Snapshot::%ssize_ = %d;\n", prefix, + data_to_be_written->length()); + + if (data_to_be_written == &source_data && !omit_) { + fprintf(fp_, "const byte* Snapshot::%sraw_data_ = Snapshot::%sdata_;\n", + prefix, prefix); + fprintf(fp_, "const int Snapshot::%sraw_size_ = Snapshot::%ssize_;\n", + prefix, prefix); + } else { + fprintf(fp_, "const byte* Snapshot::%sraw_data_ = NULL;\n", prefix); + fprintf(fp_, "const int Snapshot::%sraw_size_ = %d;\n", + prefix, source_data.length()); + } + fprintf(fp_, "\n"); } - void WriteSnapshot() { - Print(fp_); + void WriteMeta(const char* prefix, const i::Serializer& ser) const { + WriteSizeVar(ser, prefix, "new", i::NEW_SPACE); + WriteSizeVar(ser, prefix, "pointer", i::OLD_POINTER_SPACE); + WriteSizeVar(ser, prefix, "data", i::OLD_DATA_SPACE); + WriteSizeVar(ser, prefix, "code", i::CODE_SPACE); + WriteSizeVar(ser, prefix, "map", i::MAP_SPACE); + WriteSizeVar(ser, prefix, "cell", i::CELL_SPACE); + WriteSizeVar(ser, prefix, "property_cell", i::PROPERTY_CELL_SPACE); + fprintf(fp_, "\n"); } - PartialSnapshotSink* partial_sink() { return &partial_sink_; } + void WriteSizeVar(const i::Serializer& ser, const char* prefix, + const char* name, int space) const { + fprintf(fp_, "const int Snapshot::%s%s_space_used_ = %d;\n", + prefix, name, ser.CurrentAllocationAddress(space)); + } + + void WriteSnapshotData(const i::List<char>* data) const { + for (int i = 0; i < data->length(); i++) { + if ((i & 0x1f) == 0x1f) + fprintf(fp_, "\n"); + if (i > 0) + fprintf(fp_, ","); + fprintf(fp_, "%u", static_cast<unsigned char>(data->at(i))); + } + fprintf(fp_, "\n"); + } + + FILE* GetFileDescriptorOrDie(const char* filename) { + FILE* fp = i::OS::FOpen(filename, "wb"); + if (fp == NULL) { + i::PrintF("Unable to open file \"%s\" for writing.\n", filename); + exit(1); + } + return fp; + } - private: FILE* fp_; - PartialSnapshotSink partial_sink_; + FILE* raw_file_; + FILE* raw_context_file_; + Compressor* compressor_; + bool omit_; }; @@ -274,7 +280,7 @@ int main(int argc, char** argv) { // Print the usage if an error occurs when parsing the command line // flags or if the help flag is set. - int result = i::FlagList::SetFlagsFromCommandLine(&argc, argv, true); + int result = i::FlagList::SetFlagsFromCommandLine(&argc, argv, true, true); if (result > 0 || argc != 2 || i::FLAG_help) { ::printf("Usage: %s [flag] ... 
outfile\n", argv[0]); i::FlagList::PrintHelp(); @@ -293,7 +299,7 @@ int main(int argc, char** argv) { Isolate* isolate = v8::Isolate::New(); isolate->Enter(); i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); - i::Serializer::Enable(internal_isolate); + i::Serializer::RequestEnable(internal_isolate); Persistent<Context> context; { HandleScope handle_scope(isolate); @@ -359,46 +365,32 @@ int main(int argc, char** argv) { i::Heap::kNoGCFlags, "mksnapshot"); i::Object* raw_context = *v8::Utils::OpenPersistent(context); context.Reset(); - CppByteSink sink(argv[1]); + // This results in a somewhat smaller snapshot, probably because it gets rid // of some things that are cached between garbage collections. - i::StartupSerializer ser(internal_isolate, &sink); + i::List<char> snapshot_data; + ListSnapshotSink snapshot_sink(&snapshot_data); + i::StartupSerializer ser(internal_isolate, &snapshot_sink); ser.SerializeStrongReferences(); - i::PartialSerializer partial_ser( - internal_isolate, &ser, sink.partial_sink()); - partial_ser.Serialize(&raw_context); - + i::List<char> context_data; + ListSnapshotSink contex_sink(&context_data); + i::PartialSerializer context_ser(internal_isolate, &ser, &contex_sink); + context_ser.Serialize(&raw_context); ser.SerializeWeakReferences(); + { + SnapshotWriter writer(argv[1]); + writer.SetOmit(i::FLAG_omit); + if (i::FLAG_raw_file && i::FLAG_raw_context_file) + writer.SetRawFiles(i::FLAG_raw_file, i::FLAG_raw_context_file); #ifdef COMPRESS_STARTUP_DATA_BZ2 - BZip2Compressor compressor; - if (!sink.Compress(&compressor)) - return 1; - if (!sink.partial_sink()->Compress(&compressor)) - return 1; + BZip2Compressor bzip2; + writer.SetCompressor(&bzip2); #endif - sink.WriteSnapshot(); - sink.WritePartialSnapshot(); - - sink.WriteSpaceUsed( - "context_", - partial_ser.CurrentAllocationAddress(i::NEW_SPACE), - partial_ser.CurrentAllocationAddress(i::OLD_POINTER_SPACE), - partial_ser.CurrentAllocationAddress(i::OLD_DATA_SPACE), - partial_ser.CurrentAllocationAddress(i::CODE_SPACE), - partial_ser.CurrentAllocationAddress(i::MAP_SPACE), - partial_ser.CurrentAllocationAddress(i::CELL_SPACE), - partial_ser.CurrentAllocationAddress(i::PROPERTY_CELL_SPACE)); - sink.WriteSpaceUsed( - "", - ser.CurrentAllocationAddress(i::NEW_SPACE), - ser.CurrentAllocationAddress(i::OLD_POINTER_SPACE), - ser.CurrentAllocationAddress(i::OLD_DATA_SPACE), - ser.CurrentAllocationAddress(i::CODE_SPACE), - ser.CurrentAllocationAddress(i::MAP_SPACE), - ser.CurrentAllocationAddress(i::CELL_SPACE), - ser.CurrentAllocationAddress(i::PROPERTY_CELL_SPACE)); + writer.WriteSnapshot(snapshot_data, ser, context_data, context_ser); + } + isolate->Exit(); isolate->Dispose(); V8::Dispose(); diff --git a/deps/v8/src/msan.h b/deps/v8/src/msan.h index 484c9fa39..5282583af 100644 --- a/deps/v8/src/msan.h +++ b/deps/v8/src/msan.h @@ -1,35 +1,14 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // MemorySanitizer support. #ifndef V8_MSAN_H_ #define V8_MSAN_H_ +#include "globals.h" + #ifndef __has_feature # define __has_feature(x) 0 #endif @@ -38,12 +17,12 @@ # define MEMORY_SANITIZER #endif -#ifdef MEMORY_SANITIZER -# include <sanitizer/msan_interface.h> +#if defined(MEMORY_SANITIZER) && !defined(USE_SIMULATOR) +# include <sanitizer/msan_interface.h> // NOLINT // Marks a memory range as fully initialized. -# define MSAN_MEMORY_IS_INITIALIZED(p, s) __msan_unpoison((p), (s)) +# define MSAN_MEMORY_IS_INITIALIZED_IN_JIT(p, s) __msan_unpoison((p), (s)) #else -# define MSAN_MEMORY_IS_INITIALIZED(p, s) +# define MSAN_MEMORY_IS_INITIALIZED_IN_JIT(p, s) #endif #endif // V8_MSAN_H_ diff --git a/deps/v8/src/natives.h b/deps/v8/src/natives.h index 5f34420d0..2f930dc7b 100644 --- a/deps/v8/src/natives.h +++ b/deps/v8/src/natives.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_NATIVES_H_ #define V8_NATIVES_H_ diff --git a/deps/v8/src/object-observe.js b/deps/v8/src/object-observe.js index e822f0bd4..532b0d252 100644 --- a/deps/v8/src/object-observe.js +++ b/deps/v8/src/object-observe.js @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
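The mksnapshot.cc hunks at the top of this section replace the old CppByteSink with a SnapshotWriter whose WriteData/WriteSnapshotData emit each serialized blob as a named byte array plus size constants, optionally alongside a raw binary file. A minimal standalone sketch of that emit step follows; the program, symbol prefixes and payload are invented for illustration, and only the comma-separated "%u" array format is taken from the diff.

// Toy version of the "serialize bytes into generated C++ source" step.
#include <cstdio>
#include <vector>

static void WriteByteArray(FILE* fp, const char* prefix,
                           const std::vector<unsigned char>& data) {
  std::fprintf(fp, "const unsigned char %sdata_[] = {\n", prefix);
  for (size_t i = 0; i < data.size(); i++) {
    if ((i & 0x1f) == 0x1f) std::fprintf(fp, "\n");   // wrap roughly every 32 entries
    if (i > 0) std::fprintf(fp, ",");
    std::fprintf(fp, "%u", static_cast<unsigned>(data[i]));
  }
  std::fprintf(fp, "\n};\n");
  std::fprintf(fp, "const int %ssize_ = %d;\n\n", prefix,
               static_cast<int>(data.size()));
}

int main() {
  std::vector<unsigned char> blob = {1, 2, 3, 250, 251, 252};  // invented payload
  WriteByteArray(stdout, "snapshot_", blob);   // would normally go to a .cc file
  WriteByteArray(stdout, "context_", blob);
  return 0;
}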
"use strict"; @@ -79,14 +56,11 @@ function GetWeakMapWrapper() { }; MapWrapper.prototype = { + __proto__: null, get: function(key) { - key = %UnwrapGlobalProxy(key); - if (!IS_SPEC_OBJECT(key)) return UNDEFINED; return %WeakCollectionGet(this.map_, key); }, set: function(key, value) { - key = %UnwrapGlobalProxy(key); - if (!IS_SPEC_OBJECT(key)) return UNDEFINED; %WeakCollectionSet(this.map_, key, value); }, has: function(key) { @@ -382,6 +356,8 @@ function CallbackInfoNormalize(callback) { function ObjectObserve(object, callback, acceptList) { if (!IS_SPEC_OBJECT(object)) throw MakeTypeError("observe_non_object", ["observe"]); + if (%IsJSGlobalProxy(object)) + throw MakeTypeError("observe_global_proxy", ["observe"]); if (!IS_SPEC_FUNCTION(callback)) throw MakeTypeError("observe_non_function", ["observe"]); if (ObjectIsFrozen(callback)) @@ -389,6 +365,10 @@ function ObjectObserve(object, callback, acceptList) { if (!AcceptArgIsValid(acceptList)) throw MakeTypeError("observe_accept_invalid"); + return %ObjectObserveInObjectContext(object, callback, acceptList); +} + +function NativeObjectObserve(object, callback, acceptList) { var objectInfo = ObjectInfoGetOrCreate(object); ObjectInfoAddObserver(objectInfo, callback, acceptList); return object; @@ -397,6 +377,8 @@ function ObjectObserve(object, callback, acceptList) { function ObjectUnobserve(object, callback) { if (!IS_SPEC_OBJECT(object)) throw MakeTypeError("observe_non_object", ["unobserve"]); + if (%IsJSGlobalProxy(object)) + throw MakeTypeError("observe_global_proxy", ["unobserve"]); if (!IS_SPEC_FUNCTION(callback)) throw MakeTypeError("observe_non_function", ["unobserve"]); @@ -419,27 +401,22 @@ function ArrayUnobserve(object, callback) { return ObjectUnobserve(object, callback); } -function ObserverEnqueueIfActive(observer, objectInfo, changeRecord, - needsAccessCheck) { +function ObserverEnqueueIfActive(observer, objectInfo, changeRecord) { if (!ObserverIsActive(observer, objectInfo) || !TypeMapHasType(ObserverGetAcceptTypes(observer), changeRecord.type)) { return; } var callback = ObserverGetCallback(observer); - if (needsAccessCheck && - // Drop all splice records on the floor for access-checked objects - (changeRecord.type == 'splice' || - !%IsAccessAllowedForObserver( - callback, changeRecord.object, changeRecord.name))) { + if (!%ObserverObjectAndRecordHaveSameOrigin(callback, changeRecord.object, + changeRecord)) { return; } var callbackInfo = CallbackInfoNormalize(callback); if (IS_NULL(GetPendingObservers())) { SetPendingObservers(nullProtoObject()) - GetMicrotaskQueue().push(ObserveMicrotaskRunner); - %SetMicrotaskPending(true); + EnqueueMicrotask(ObserveMicrotaskRunner); } GetPendingObservers()[callbackInfo.priority] = callback; callbackInfo.push(changeRecord); @@ -461,22 +438,16 @@ function ObjectInfoEnqueueExternalChangeRecord(objectInfo, changeRecord, type) { } ObjectFreeze(newRecord); - ObjectInfoEnqueueInternalChangeRecord(objectInfo, newRecord, - true /* skip access check */); + ObjectInfoEnqueueInternalChangeRecord(objectInfo, newRecord); } -function ObjectInfoEnqueueInternalChangeRecord(objectInfo, changeRecord, - skipAccessCheck) { +function ObjectInfoEnqueueInternalChangeRecord(objectInfo, changeRecord) { // TODO(rossberg): adjust once there is a story for symbols vs proxies. 
if (IS_SYMBOL(changeRecord.name)) return; - var needsAccessCheck = !skipAccessCheck && - %IsAccessCheckNeeded(changeRecord.object); - if (ChangeObserversIsOptimized(objectInfo.changeObservers)) { var observer = objectInfo.changeObservers; - ObserverEnqueueIfActive(observer, objectInfo, changeRecord, - needsAccessCheck); + ObserverEnqueueIfActive(observer, objectInfo, changeRecord); return; } @@ -484,8 +455,7 @@ function ObjectInfoEnqueueInternalChangeRecord(objectInfo, changeRecord, var observer = objectInfo.changeObservers[priority]; if (IS_NULL(observer)) continue; - ObserverEnqueueIfActive(observer, objectInfo, changeRecord, - needsAccessCheck); + ObserverEnqueueIfActive(observer, objectInfo, changeRecord); } } @@ -562,7 +532,6 @@ function ObjectNotifierPerformChange(changeType, changeFn) { throw MakeTypeError("called_on_non_object", ["performChange"]); var objectInfo = ObjectInfoGetFromNotifier(this); - if (IS_UNDEFINED(objectInfo)) throw MakeTypeError("observe_notify_non_notifier"); if (!IS_STRING(changeType)) @@ -570,6 +539,11 @@ function ObjectNotifierPerformChange(changeType, changeFn) { if (!IS_SPEC_FUNCTION(changeFn)) throw MakeTypeError("observe_perform_non_function"); + return %ObjectNotifierPerformChangeInObjectContext( + objectInfo, changeType, changeFn); +} + +function NativeObjectNotifierPerformChange(objectInfo, changeType, changeFn) { ObjectInfoAddPerformingType(objectInfo, changeType); var changeRecord; @@ -586,9 +560,17 @@ function ObjectNotifierPerformChange(changeType, changeFn) { function ObjectGetNotifier(object) { if (!IS_SPEC_OBJECT(object)) throw MakeTypeError("observe_non_object", ["getNotifier"]); + if (%IsJSGlobalProxy(object)) + throw MakeTypeError("observe_global_proxy", ["getNotifier"]); if (ObjectIsFrozen(object)) return null; + if (!%ObjectWasCreatedInCurrentOrigin(object)) return null; + + return %ObjectGetNotifierInObjectContext(object); +} + +function NativeObjectGetNotifier(object) { var objectInfo = ObjectInfoGetOrCreate(object); return ObjectInfoGetNotifier(objectInfo); } diff --git a/deps/v8/src/objects-debug.cc b/deps/v8/src/objects-debug.cc index ca025e6cf..7b7c9c9a7 100644 --- a/deps/v8/src/objects-debug.cc +++ b/deps/v8/src/objects-debug.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -38,16 +15,11 @@ namespace internal { #ifdef VERIFY_HEAP -void MaybeObject::Verify() { - Object* this_as_object; - if (ToObject(&this_as_object)) { - if (this_as_object->IsSmi()) { - Smi::cast(this_as_object)->SmiVerify(); - } else { - HeapObject::cast(this_as_object)->HeapObjectVerify(); - } +void Object::ObjectVerify() { + if (IsSmi()) { + Smi::cast(this)->SmiVerify(); } else { - Failure::cast(this)->FailureVerify(); + HeapObject::cast(this)->HeapObjectVerify(); } } @@ -66,11 +38,6 @@ void Smi::SmiVerify() { } -void Failure::FailureVerify() { - CHECK(IsFailure()); -} - - void HeapObject::HeapObjectVerify() { InstanceType instance_type = map()->instance_type(); @@ -165,6 +132,12 @@ void HeapObject::HeapObjectVerify() { case JS_MAP_TYPE: JSMap::cast(this)->JSMapVerify(); break; + case JS_SET_ITERATOR_TYPE: + JSSetIterator::cast(this)->JSSetIteratorVerify(); + break; + case JS_MAP_ITERATOR_TYPE: + JSMapIterator::cast(this)->JSMapIteratorVerify(); + break; case JS_WEAK_MAP_TYPE: JSWeakMap::cast(this)->JSWeakMapVerify(); break; @@ -294,6 +267,12 @@ void JSObject::JSObjectVerify() { if (value->IsUninitialized()) continue; if (r.IsSmi()) ASSERT(value->IsSmi()); if (r.IsHeapObject()) ASSERT(value->IsHeapObject()); + HeapType* field_type = descriptors->GetFieldType(i); + if (r.IsNone()) { + CHECK(field_type->Is(HeapType::None())); + } else if (!HeapType::Any()->Is(field_type)) { + CHECK(!field_type->NowStable() || field_type->NowContains(value)); + } } } } @@ -368,7 +347,6 @@ void PolymorphicCodeCache::PolymorphicCodeCacheVerify() { void TypeFeedbackInfo::TypeFeedbackInfoVerify() { VerifyObjectField(kStorage1Offset); VerifyObjectField(kStorage2Offset); - VerifyHeapPointer(feedback_vector()); } @@ -380,11 +358,7 @@ void AliasedArgumentsEntry::AliasedArgumentsEntryVerify() { void FixedArray::FixedArrayVerify() { for (int i = 0; i < length(); i++) { Object* e = get(i); - if (e->IsHeapObject()) { - VerifyHeapPointer(e); - } else { - e->Verify(); - } + VerifyPointer(e); } } @@ -521,6 +495,7 @@ void ConsString::ConsStringVerify() { CHECK(this->second() == GetHeap()->empty_string() || this->second()->IsString()); CHECK(this->length() >= ConsString::kMinLength); + CHECK(this->length() == this->first()->length() + this->second()->length()); if (this->IsFlat()) { // A flat cons can only be created by String::SlowTryFlatten. // Afterwards, the first part may be externalized. 
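ConsStringVerify in the hunk above gains a new invariant: a cons string's cached length must equal the sum of its children's lengths. The toy rope below checks the same kind of invariant; the struct and helper names are invented for this note, only the invariant itself comes from the diff.

// Toy rope node with the length invariant added to ConsStringVerify.
#include <cassert>
#include <memory>
#include <string>

struct Rope {
  std::string flat;                      // leaf payload
  std::shared_ptr<Rope> first, second;   // set only for internal (cons) nodes
  size_t length;                         // cached, must match the children
};

void RopeVerify(const Rope& r) {
  if (r.first && r.second) {
    assert(r.length == r.first->length + r.second->length);
    RopeVerify(*r.first);
    RopeVerify(*r.second);
  } else {
    assert(r.length == r.flat.size());
  }
}

int main() {
  auto a = std::make_shared<Rope>(Rope{"foo", nullptr, nullptr, 3});
  auto b = std::make_shared<Rope>(Rope{"barbaz", nullptr, nullptr, 6});
  Rope cons{"", a, b, 9};
  RopeVerify(cons);  // would abort if the cached length were wrong
  return 0;
}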
@@ -552,6 +527,7 @@ void SharedFunctionInfo::SharedFunctionInfoVerify() { VerifyObjectField(kNameOffset); VerifyObjectField(kCodeOffset); VerifyObjectField(kOptimizedCodeMapOffset); + VerifyObjectField(kFeedbackVectorOffset); VerifyObjectField(kScopeInfoOffset); VerifyObjectField(kInstanceClassNameOffset); VerifyObjectField(kFunctionDataOffset); @@ -595,18 +571,41 @@ void JSBuiltinsObject::JSBuiltinsObjectVerify() { void Oddball::OddballVerify() { CHECK(IsOddball()); + Heap* heap = GetHeap(); VerifyHeapPointer(to_string()); Object* number = to_number(); if (number->IsHeapObject()) { - CHECK(number == HeapObject::cast(number)->GetHeap()->nan_value()); + CHECK(number == heap->nan_value()); } else { CHECK(number->IsSmi()); int value = Smi::cast(number)->value(); // Hidden oddballs have negative smis. - const int kLeastHiddenOddballNumber = -4; + const int kLeastHiddenOddballNumber = -5; CHECK_LE(value, 1); CHECK(value >= kLeastHiddenOddballNumber); } + if (map() == heap->undefined_map()) { + CHECK(this == heap->undefined_value()); + } else if (map() == heap->the_hole_map()) { + CHECK(this == heap->the_hole_value()); + } else if (map() == heap->null_map()) { + CHECK(this == heap->null_value()); + } else if (map() == heap->boolean_map()) { + CHECK(this == heap->true_value() || + this == heap->false_value()); + } else if (map() == heap->uninitialized_map()) { + CHECK(this == heap->uninitialized_value()); + } else if (map() == heap->no_interceptor_result_sentinel_map()) { + CHECK(this == heap->no_interceptor_result_sentinel()); + } else if (map() == heap->arguments_marker_map()) { + CHECK(this == heap->arguments_marker()); + } else if (map() == heap->termination_exception_map()) { + CHECK(this == heap->termination_exception()); + } else if (map() == heap->exception_map()) { + CHECK(this == heap->exception()); + } else { + UNREACHABLE(); + } } @@ -626,10 +625,11 @@ void PropertyCell::PropertyCellVerify() { void Code::CodeVerify() { CHECK(IsAligned(reinterpret_cast<intptr_t>(instruction_start()), kCodeAlignment)); - relocation_info()->Verify(); + relocation_info()->ObjectVerify(); Address last_gc_pc = NULL; + Isolate* isolate = GetIsolate(); for (RelocIterator it(this); !it.done(); it.next()) { - it.rinfo()->Verify(); + it.rinfo()->Verify(isolate); // Ensure that GC will not iterate twice over the same pointer. if (RelocInfo::IsGCRelocMode(it.rinfo()->rmode())) { CHECK(it.rinfo()->pc() != last_gc_pc); @@ -640,19 +640,25 @@ void Code::CodeVerify() { void Code::VerifyEmbeddedObjectsDependency() { + if (!CanContainWeakObjects()) return; + DisallowHeapAllocation no_gc; + Isolate* isolate = GetIsolate(); + HandleScope scope(isolate); int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); for (RelocIterator it(this, mode_mask); !it.done(); it.next()) { Object* obj = it.rinfo()->target_object(); if (IsWeakObject(obj)) { if (obj->IsMap()) { Map* map = Map::cast(obj); - CHECK(map->dependent_code()->Contains( - DependentCode::kWeaklyEmbeddedGroup, this)); + DependentCode::DependencyGroup group = is_optimized_code() ? 
+ DependentCode::kWeakCodeGroup : DependentCode::kWeakICGroup; + CHECK(map->dependent_code()->Contains(group, this)); } else if (obj->IsJSObject()) { Object* raw_table = GetIsolate()->heap()->weak_object_to_code_table(); WeakHashTable* table = WeakHashTable::cast(raw_table); - CHECK(DependentCode::cast(table->Lookup(obj))->Contains( - DependentCode::kWeaklyEmbeddedGroup, this)); + Handle<Object> key_obj(obj, isolate); + CHECK(DependentCode::cast(table->Lookup(key_obj))->Contains( + DependentCode::kWeakCodeGroup, this)); } } } @@ -676,7 +682,8 @@ void JSSet::JSSetVerify() { CHECK(IsJSSet()); JSObjectVerify(); VerifyHeapPointer(table()); - CHECK(table()->IsHashTable() || table()->IsUndefined()); + CHECK(table()->IsOrderedHashTable() || table()->IsUndefined()); + // TODO(arv): Verify OrderedHashTable too. } @@ -684,7 +691,40 @@ void JSMap::JSMapVerify() { CHECK(IsJSMap()); JSObjectVerify(); VerifyHeapPointer(table()); - CHECK(table()->IsHashTable() || table()->IsUndefined()); + CHECK(table()->IsOrderedHashTable() || table()->IsUndefined()); + // TODO(arv): Verify OrderedHashTable too. +} + + +void JSSetIterator::JSSetIteratorVerify() { + CHECK(IsJSSetIterator()); + JSObjectVerify(); + VerifyHeapPointer(table()); + CHECK(table()->IsOrderedHashTable() || table()->IsUndefined()); + CHECK(index()->IsSmi()); + CHECK(count()->IsSmi()); + CHECK(kind()->IsSmi()); + VerifyHeapPointer(next_iterator()); + CHECK(next_iterator()->IsJSSetIterator() || next_iterator()->IsUndefined()); + VerifyHeapPointer(table()); + CHECK(previous_iterator()->IsJSSetIterator() + || previous_iterator()->IsUndefined()); +} + + +void JSMapIterator::JSMapIteratorVerify() { + CHECK(IsJSMapIterator()); + JSObjectVerify(); + VerifyHeapPointer(table()); + CHECK(table()->IsOrderedHashTable() || table()->IsUndefined()); + CHECK(index()->IsSmi()); + CHECK(count()->IsSmi()); + CHECK(kind()->IsSmi()); + VerifyHeapPointer(next_iterator()); + CHECK(next_iterator()->IsJSMapIterator() || next_iterator()->IsUndefined()); + VerifyHeapPointer(table()); + CHECK(previous_iterator()->IsJSMapIterator() + || previous_iterator()->IsUndefined()); } @@ -811,7 +851,7 @@ void Foreign::ForeignVerify() { void Box::BoxVerify() { CHECK(IsBox()); - value()->Verify(); + value()->ObjectVerify(); } @@ -947,7 +987,7 @@ void Script::ScriptVerify() { void JSFunctionResultCache::JSFunctionResultCacheVerify() { - JSFunction::cast(get(kFactoryIndex))->Verify(); + JSFunction::cast(get(kFactoryIndex))->ObjectVerify(); int size = Smi::cast(get(kCacheSizeIndex))->value(); CHECK(kEntriesIndex <= size); @@ -962,21 +1002,21 @@ void JSFunctionResultCache::JSFunctionResultCacheVerify() { if (FLAG_enable_slow_asserts) { for (int i = kEntriesIndex; i < size; i++) { CHECK(!get(i)->IsTheHole()); - get(i)->Verify(); + get(i)->ObjectVerify(); } for (int i = size; i < length(); i++) { CHECK(get(i)->IsTheHole()); - get(i)->Verify(); + get(i)->ObjectVerify(); } } } void NormalizedMapCache::NormalizedMapCacheVerify() { - FixedArray::cast(this)->Verify(); + FixedArray::cast(this)->FixedArrayVerify(); if (FLAG_enable_slow_asserts) { for (int i = 0; i < length(); i++) { - Object* e = get(i); + Object* e = FixedArray::get(i); if (e->IsMap()) { Map::cast(e)->SharedMapVerify(); } else { @@ -987,7 +1027,6 @@ void NormalizedMapCache::NormalizedMapCacheVerify() { } -#ifdef ENABLE_DEBUGGER_SUPPORT void DebugInfo::DebugInfoVerify() { CHECK(IsDebugInfo()); VerifyPointer(shared()); @@ -1004,7 +1043,6 @@ void BreakPointInfo::BreakPointInfoVerify() { statement_position()->SmiVerify(); 
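The new JSSetIteratorVerify/JSMapIteratorVerify above check that each live iterator's next_iterator and previous_iterator are either another iterator of the same kind or undefined, i.e. that iterators over an OrderedHashTable hang off it as a doubly linked list. The sketch below checks the related, slightly stronger property that the chain's back links are consistent, on a toy iterator list with invented names.

// Toy doubly linked chain of live iterators, verified for consistent links.
#include <cassert>

struct Iter {
  Iter* prev = nullptr;  // nullptr plays the role of "undefined"
  Iter* next = nullptr;
};

void VerifyIteratorChain(const Iter* head) {
  const Iter* expected_prev = nullptr;
  for (const Iter* it = head; it != nullptr; it = it->next) {
    assert(it->prev == expected_prev);                  // back link matches
    assert(it->next == nullptr || it->next->prev == it);
    expected_prev = it;
  }
}

int main() {
  Iter a, b, c;
  a.next = &b; b.prev = &a; b.next = &c; c.prev = &b;
  VerifyIteratorChain(&a);
  return 0;
}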
VerifyPointer(break_point_objects()); } -#endif // ENABLE_DEBUGGER_SUPPORT #endif // VERIFY_HEAP #ifdef DEBUG diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h index 9d550374e..029e80d9b 100644 --- a/deps/v8/src/objects-inl.h +++ b/deps/v8/src/objects-inl.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
// // Review notes: // @@ -87,13 +64,6 @@ PropertyDetails PropertyDetails::AsDeleted() const { } -#define FIXED_TYPED_ARRAY_CAST_ACCESSOR(type) \ - template<> \ - type* type::cast(Object* object) { \ - SLOW_ASSERT(object->Is##type()); \ - return reinterpret_cast<type*>(object); \ - } - #define INT_ACCESSORS(holder, name, offset) \ int holder::name() { return READ_INT_FIELD(this, offset); } \ void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); } @@ -125,6 +95,23 @@ PropertyDetails PropertyDetails::AsDeleted() const { WRITE_FIELD(this, offset, Smi::FromInt(value)); \ } +#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset) \ + int holder::synchronized_##name() { \ + Object* value = ACQUIRE_READ_FIELD(this, offset); \ + return Smi::cast(value)->value(); \ + } \ + void holder::synchronized_set_##name(int value) { \ + RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \ + } + +#define NOBARRIER_SMI_ACCESSORS(holder, name, offset) \ + int holder::nobarrier_##name() { \ + Object* value = NOBARRIER_READ_FIELD(this, offset); \ + return Smi::cast(value)->value(); \ + } \ + void holder::nobarrier_set_##name(int value) { \ + NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \ + } #define BOOL_GETTER(holder, field, name, offset) \ bool holder::name() { \ @@ -170,12 +157,6 @@ bool Object::IsHeapObject() { } -bool Object::NonFailureIsHeapObject() { - ASSERT(!this->IsFailure()); - return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0; -} - - TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) TYPE_CHECKER(Symbol, SYMBOL_TYPE) @@ -209,6 +190,11 @@ bool Object::IsSpecFunction() { } +bool Object::IsTemplateInfo() { + return IsObjectTemplateInfo() || IsFunctionTemplateInfo(); +} + + bool Object::IsInternalizedString() { if (!this->IsHeapObject()) return false; uint32_t type = HeapObject::cast(this)->map()->instance_type(); @@ -269,6 +255,7 @@ bool Object::IsExternalTwoByteString() { String::cast(this)->IsTwoByteRepresentation(); } + bool Object::HasValidElements() { // Dictionary is covered under FixedArray. 
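The new SYNCHRONIZED_SMI_ACCESSORS and NOBARRIER_SMI_ACCESSORS macros above generate accessors that go through acquire/release (respectively unordered) atomic reads and writes of a Smi field, presumably so certain fields can be read safely off the main thread. A rough portable C++ counterpart of that split is shown below; the class and field names are invented, only the acquire/release-versus-relaxed distinction mirrors the diff.

// Rough C++11 counterpart of the SYNCHRONIZED_ / NOBARRIER_ accessor split.
#include <atomic>
#include <cstdio>

class FakeHeapObject {
 public:
  // "synchronized_" accessors: release store, acquire load.
  int synchronized_length() const { return length_.load(std::memory_order_acquire); }
  void synchronized_set_length(int v) { length_.store(v, std::memory_order_release); }

  // "nobarrier_" accessors: atomic but unordered.
  int nobarrier_length() const { return length_.load(std::memory_order_relaxed); }
  void nobarrier_set_length(int v) { length_.store(v, std::memory_order_relaxed); }

 private:
  std::atomic<int> length_{0};
};

int main() {
  FakeHeapObject o;
  o.synchronized_set_length(42);
  std::printf("%d %d\n", o.synchronized_length(), o.nobarrier_length());
  return 0;
}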
return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() || @@ -276,16 +263,17 @@ bool Object::HasValidElements() { } -MaybeObject* Object::AllocateNewStorageFor(Heap* heap, - Representation representation) { - if (representation.IsSmi() && IsUninitialized()) { - return Smi::FromInt(0); +Handle<Object> Object::NewStorageFor(Isolate* isolate, + Handle<Object> object, + Representation representation) { + if (representation.IsSmi() && object->IsUninitialized()) { + return handle(Smi::FromInt(0), isolate); } - if (!representation.IsDouble()) return this; - if (IsUninitialized()) { - return heap->AllocateHeapNumber(0); + if (!representation.IsDouble()) return object; + if (object->IsUninitialized()) { + return isolate->factory()->NewHeapNumber(0); } - return heap->AllocateHeapNumber(Number()); + return isolate->factory()->NewHeapNumber(object->Number()); } @@ -458,13 +446,34 @@ uc32 FlatStringReader::Get(int index) { } +Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) { + return key->AsHandle(isolate); +} + + +Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) { + return key->AsHandle(isolate); +} + + +Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate, + HashTableKey* key) { + return key->AsHandle(isolate); +} + + +Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate, + HashTableKey* key) { + return key->AsHandle(isolate); +} + template <typename Char> class SequentialStringKey : public HashTableKey { public: explicit SequentialStringKey(Vector<const Char> string, uint32_t seed) : string_(string), hash_field_(0), seed_(seed) { } - virtual uint32_t Hash() { + virtual uint32_t Hash() V8_OVERRIDE { hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(), string_.length(), seed_); @@ -475,7 +484,7 @@ class SequentialStringKey : public HashTableKey { } - virtual uint32_t HashForObject(Object* other) { + virtual uint32_t HashForObject(Object* other) V8_OVERRIDE { return String::cast(other)->Hash(); } @@ -490,11 +499,11 @@ class OneByteStringKey : public SequentialStringKey<uint8_t> { OneByteStringKey(Vector<const uint8_t> str, uint32_t seed) : SequentialStringKey<uint8_t>(str, seed) { } - virtual bool IsMatch(Object* string) { + virtual bool IsMatch(Object* string) V8_OVERRIDE { return String::cast(string)->IsOneByteEqualTo(string_); } - virtual MaybeObject* AsObject(Heap* heap); + virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE; }; @@ -509,7 +518,7 @@ class SubStringKey : public HashTableKey { ASSERT(string_->IsSeqString() || string->IsExternalString()); } - virtual uint32_t Hash() { + virtual uint32_t Hash() V8_OVERRIDE { ASSERT(length_ >= 0); ASSERT(from_ + length_ <= string_->length()); const Char* chars = GetChars() + from_; @@ -520,12 +529,12 @@ class SubStringKey : public HashTableKey { return result; } - virtual uint32_t HashForObject(Object* other) { + virtual uint32_t HashForObject(Object* other) V8_OVERRIDE { return String::cast(other)->Hash(); } - virtual bool IsMatch(Object* string); - virtual MaybeObject* AsObject(Heap* heap); + virtual bool IsMatch(Object* string) V8_OVERRIDE; + virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE; private: const Char* GetChars(); @@ -550,11 +559,11 @@ class TwoByteStringKey : public SequentialStringKey<uc16> { explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed) : SequentialStringKey<uc16>(str, seed) { } - virtual bool IsMatch(Object* string) { + virtual bool IsMatch(Object* string) V8_OVERRIDE { return 
String::cast(string)->IsTwoByteEqualTo(string_); } - virtual MaybeObject* AsObject(Heap* heap); + virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE; }; @@ -564,11 +573,11 @@ class Utf8StringKey : public HashTableKey { explicit Utf8StringKey(Vector<const char> string, uint32_t seed) : string_(string), hash_field_(0), seed_(seed) { } - virtual bool IsMatch(Object* string) { + virtual bool IsMatch(Object* string) V8_OVERRIDE { return String::cast(string)->IsUtf8EqualTo(string_); } - virtual uint32_t Hash() { + virtual uint32_t Hash() V8_OVERRIDE { if (hash_field_ != 0) return hash_field_ >> String::kHashShift; hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_); uint32_t result = hash_field_ >> String::kHashShift; @@ -576,15 +585,14 @@ class Utf8StringKey : public HashTableKey { return result; } - virtual uint32_t HashForObject(Object* other) { + virtual uint32_t HashForObject(Object* other) V8_OVERRIDE { return String::cast(other)->Hash(); } - virtual MaybeObject* AsObject(Heap* heap) { + virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { if (hash_field_ == 0) Hash(); - return heap->AllocateInternalizedStringFromUtf8(string_, - chars_, - hash_field_); + return isolate->factory()->NewInternalizedStringFromUtf8( + string_, chars_, hash_field_); } Vector<const char> string_; @@ -638,38 +646,6 @@ bool Object::IsFixedTypedArrayBase() { } -bool MaybeObject::IsFailure() { - return HAS_FAILURE_TAG(this); -} - - -bool MaybeObject::IsRetryAfterGC() { - return HAS_FAILURE_TAG(this) - && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC; -} - - -bool MaybeObject::IsException() { - return this == Failure::Exception(); -} - - -bool MaybeObject::IsTheHole() { - return !IsFailure() && ToObjectUnchecked()->IsTheHole(); -} - - -bool MaybeObject::IsUninitialized() { - return !IsFailure() && ToObjectUnchecked()->IsUninitialized(); -} - - -Failure* Failure::cast(MaybeObject* obj) { - ASSERT(HAS_FAILURE_TAG(obj)); - return reinterpret_cast<Failure*>(obj); -} - - bool Object::IsJSReceiver() { STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); return IsHeapObject() && @@ -694,6 +670,8 @@ bool Object::IsJSProxy() { TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE) TYPE_CHECKER(JSSet, JS_SET_TYPE) TYPE_CHECKER(JSMap, JS_MAP_TYPE) +TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE) +TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE) TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE) TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE) TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE) @@ -848,8 +826,7 @@ bool Object::IsDictionary() { bool Object::IsStringTable() { - return IsHashTable() && - this == HeapObject::cast(this)->GetHeap()->raw_unchecked_string_table(); + return IsHashTable(); } @@ -873,13 +850,23 @@ bool Object::IsJSFunctionResultCache() { bool Object::IsNormalizedMapCache() { - if (!IsFixedArray()) return false; - if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) { + return NormalizedMapCache::IsNormalizedMapCache(this); +} + + +int NormalizedMapCache::GetIndex(Handle<Map> map) { + return map->Hash() % NormalizedMapCache::kEntries; +} + + +bool NormalizedMapCache::IsNormalizedMapCache(Object* obj) { + if (!obj->IsFixedArray()) return false; + if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) { return false; } #ifdef VERIFY_HEAP if (FLAG_verify_heap) { - reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify(); + reinterpret_cast<NormalizedMapCache*>(obj)->NormalizedMapCacheVerify(); } #endif return 
true; @@ -911,6 +898,13 @@ bool Object::IsObjectHashTable() { } +bool Object::IsOrderedHashTable() { + return IsHeapObject() && + HeapObject::cast(this)->map() == + HeapObject::cast(this)->GetHeap()->ordered_hash_table_map(); +} + + bool Object::IsPrimitive() { return IsOddball() || IsNumber() || IsString(); } @@ -992,6 +986,11 @@ bool Object::IsTheHole() { } +bool Object::IsException() { + return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException; +} + + bool Object::IsUninitialized() { return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized; } @@ -1025,8 +1024,8 @@ bool Object::IsNaN() { } -Handle<Object> Object::ToSmi(Isolate* isolate, Handle<Object> object) { - if (object->IsSmi()) return object; +MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) { + if (object->IsSmi()) return Handle<Smi>::cast(object); if (object->IsHeapNumber()) { double value = Handle<HeapNumber>::cast(object)->value(); int int_value = FastD2I(value); @@ -1034,21 +1033,14 @@ Handle<Object> Object::ToSmi(Isolate* isolate, Handle<Object> object) { return handle(Smi::FromInt(int_value), isolate); } } - return Handle<Object>(); + return Handle<Smi>(); } -// TODO(ishell): Use handlified version instead. -MaybeObject* Object::ToSmi() { - if (IsSmi()) return this; - if (IsHeapNumber()) { - double value = HeapNumber::cast(this)->value(); - int int_value = FastD2I(value); - if (value == FastI2D(int_value) && Smi::IsValid(int_value)) { - return Smi::FromInt(int_value); - } - } - return Failure::Exception(); +MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate, + Handle<Object> object) { + return ToObject( + isolate, object, handle(isolate->context()->native_context(), isolate)); } @@ -1057,9 +1049,16 @@ bool Object::HasSpecificClassOf(String* name) { } -Handle<Object> Object::GetElement(Isolate* isolate, - Handle<Object> object, - uint32_t index) { +MaybeHandle<Object> Object::GetProperty(Handle<Object> object, + Handle<Name> name) { + PropertyAttributes attributes; + return GetPropertyWithReceiver(object, object, name, &attributes); +} + + +MaybeHandle<Object> Object::GetElement(Isolate* isolate, + Handle<Object> object, + uint32_t index) { // GetElement can trigger a getter which can cause allocation. // This was not always the case. This ASSERT is here to catch // leftover incorrect uses. @@ -1068,24 +1067,52 @@ Handle<Object> Object::GetElement(Isolate* isolate, } -Handle<Object> Object::GetElementNoExceptionThrown(Isolate* isolate, - Handle<Object> object, +MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object, + Handle<Name> name) { + uint32_t index; + Isolate* isolate = name->GetIsolate(); + if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index); + return GetProperty(object, name); +} + + +MaybeHandle<Object> Object::GetProperty(Isolate* isolate, + Handle<Object> object, + const char* name) { + Handle<String> str = isolate->factory()->InternalizeUtf8String(name); + ASSERT(!str.is_null()); +#ifdef DEBUG + uint32_t index; // Assert that the name is not an array index. 
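The hunks above belong to the broader migration away from failure-tagged MaybeObject* results: Object::ToSmi now returns MaybeHandle<Smi>, the GetProperty/GetElement family returns MaybeHandle<Object>, and HashTableKey::AsObject(Heap*) becomes AsHandle(Isolate*). The toy wrapper below sketches only the "maybe" idea, an empty result checked explicitly at the call site instead of a tagged failure pointer; it is not V8's MaybeHandle.

// Toy "maybe" result: empty on failure, checked explicitly by the caller.
#include <cstdio>

template <typename T>
class MaybeValue {
 public:
  MaybeValue() : has_value_(false), value_() {}
  explicit MaybeValue(T v) : has_value_(true), value_(v) {}
  bool To(T* out) const {              // shaped like MaybeHandle::ToHandle
    if (has_value_) *out = value_;
    return has_value_;
  }
 private:
  bool has_value_;
  T value_;
};

MaybeValue<int> ToSmallInt(double d) {
  int i = static_cast<int>(d);
  if (static_cast<double>(i) == d) return MaybeValue<int>(i);
  return MaybeValue<int>();            // "empty" instead of a failure sentinel
}

int main() {
  int out = 0;
  if (ToSmallInt(3.0).To(&out)) std::printf("smi: %d\n", out);
  if (!ToSmallInt(3.5).To(&out)) std::printf("not representable\n");
  return 0;
}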
+ ASSERT(!str->AsArrayIndex(&index)); +#endif // DEBUG + return GetProperty(object, str); +} + + +MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy, + Handle<Object> receiver, uint32_t index) { - Handle<Object> result = - Object::GetElementWithReceiver(isolate, object, object, index); - CHECK_NOT_EMPTY_HANDLE(isolate, result); - return result; + return GetPropertyWithHandler( + proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index)); } -MaybeObject* Object::GetProperty(Name* key) { - PropertyAttributes attributes; - return GetPropertyWithReceiver(this, key, &attributes); +MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy, + Handle<JSReceiver> receiver, + uint32_t index, + Handle<Object> value, + StrictMode strict_mode) { + Isolate* isolate = proxy->GetIsolate(); + Handle<String> name = isolate->factory()->Uint32ToString(index); + return SetPropertyWithHandler( + proxy, receiver, name, value, NONE, strict_mode); } -MaybeObject* Object::GetProperty(Name* key, PropertyAttributes* attributes) { - return GetPropertyWithReceiver(this, key, attributes); +bool JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index) { + Isolate* isolate = proxy->GetIsolate(); + Handle<String> name = isolate->factory()->Uint32ToString(index); + return HasPropertyWithHandler(proxy, name); } @@ -1095,9 +1122,25 @@ MaybeObject* Object::GetProperty(Name* key, PropertyAttributes* attributes) { #define READ_FIELD(p, offset) \ (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset))) +#define ACQUIRE_READ_FIELD(p, offset) \ + reinterpret_cast<Object*>( \ + Acquire_Load(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset)))) + +#define NOBARRIER_READ_FIELD(p, offset) \ + reinterpret_cast<Object*>( \ + NoBarrier_Load(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset)))) + #define WRITE_FIELD(p, offset, value) \ (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value) +#define RELEASE_WRITE_FIELD(p, offset, value) \ + Release_Store(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset)), \ + reinterpret_cast<AtomicWord>(value)); + +#define NOBARRIER_WRITE_FIELD(p, offset, value) \ + NoBarrier_Store(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset)), \ + reinterpret_cast<AtomicWord>(value)); + #define WRITE_BARRIER(heap, object, offset, value) \ heap->incremental_marking()->RecordWrite( \ object, HeapObject::RawField(object, offset), value); \ @@ -1192,9 +1235,16 @@ MaybeObject* Object::GetProperty(Name* key, PropertyAttributes* attributes) { #define READ_BYTE_FIELD(p, offset) \ (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset))) +#define NOBARRIER_READ_BYTE_FIELD(p, offset) \ + static_cast<byte>(NoBarrier_Load( \ + reinterpret_cast<Atomic8*>(FIELD_ADDR(p, offset))) ) + #define WRITE_BYTE_FIELD(p, offset, value) \ (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value) +#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \ + NoBarrier_Store(reinterpret_cast<Atomic8*>(FIELD_ADDR(p, offset)), \ + static_cast<Atomic8>(value)); Object** HeapObject::RawField(HeapObject* obj, int byte_offset) { return &READ_FIELD(obj, byte_offset); @@ -1219,62 +1269,6 @@ Smi* Smi::FromIntptr(intptr_t value) { } -Failure::Type Failure::type() const { - return static_cast<Type>(value() & kFailureTypeTagMask); -} - - -bool Failure::IsInternalError() const { - return type() == INTERNAL_ERROR; -} - - -AllocationSpace Failure::allocation_space() const { - ASSERT_EQ(RETRY_AFTER_GC, type()); - return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize) - & kSpaceTagMask); -} 
- - -Failure* Failure::InternalError() { - return Construct(INTERNAL_ERROR); -} - - -Failure* Failure::Exception() { - return Construct(EXCEPTION); -} - - -intptr_t Failure::value() const { - return static_cast<intptr_t>( - reinterpret_cast<uintptr_t>(this) >> kFailureTagSize); -} - - -Failure* Failure::RetryAfterGC() { - return RetryAfterGC(NEW_SPACE); -} - - -Failure* Failure::RetryAfterGC(AllocationSpace space) { - ASSERT((space & ~kSpaceTagMask) == 0); - return Construct(RETRY_AFTER_GC, space); -} - - -Failure* Failure::Construct(Type type, intptr_t value) { - uintptr_t info = - (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type; - ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info); - // Fill the unused bits with a pattern that's easy to recognize in crash - // dumps. - static const int kFailureMagicPattern = 0x0BAD0000; - return reinterpret_cast<Failure*>( - (info << kFailureTagSize) | kFailureTag | kFailureMagicPattern); -} - - bool Smi::IsValid(intptr_t value) { bool result = Internals::IsValidSmi(value); ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue); @@ -1348,6 +1342,26 @@ void HeapObject::set_map(Map* value) { } +Map* HeapObject::synchronized_map() { + return synchronized_map_word().ToMap(); +} + + +void HeapObject::synchronized_set_map(Map* value) { + synchronized_set_map_word(MapWord::FromMap(value)); + if (value != NULL) { + // TODO(1600) We are passing NULL as a slot because maps can never be on + // evacuation candidate. + value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value); + } +} + + +void HeapObject::synchronized_set_map_no_write_barrier(Map* value) { + synchronized_set_map_word(MapWord::FromMap(value)); +} + + // Unsafe accessor omitting write barrier. void HeapObject::set_map_no_write_barrier(Map* value) { set_map_word(MapWord::FromMap(value)); @@ -1355,14 +1369,26 @@ void HeapObject::set_map_no_write_barrier(Map* value) { MapWord HeapObject::map_word() { - return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset))); + return MapWord( + reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset))); } void HeapObject::set_map_word(MapWord map_word) { - // WRITE_FIELD does not invoke write barrier, but there is no need - // here. 
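The deleted code above is the old Failure encoding, which packed an error kind (and, for RETRY_AFTER_GC, an allocation space) into the low bits of a pointer-sized value, padded with a recognizable magic pattern for crash dumps. The toy below shows the general low-bit tagging trick with invented tag values and widths; it is only meant to make the deleted Construct/type/value code easier to read.

// Toy low-bit tagging of aligned pointer-sized values (invented widths/tags).
#include <cassert>
#include <cstdint>
#include <cstdio>

const uintptr_t kTagBits = 2;                    // objects are >= 4-byte aligned
const uintptr_t kTagMask = (1u << kTagBits) - 1;
const uintptr_t kFailureTag = 3;                 // low bits 11 mark a "failure"

uintptr_t MakeFailure(uintptr_t info) {
  uintptr_t encoded = (info << kTagBits) | kFailureTag;
  assert((encoded >> kTagBits) == info);         // info must survive the shift
  return encoded;
}

bool IsFailure(uintptr_t v) { return (v & kTagMask) == kFailureTag; }
uintptr_t FailureInfo(uintptr_t v) { return v >> kTagBits; }

int main() {
  uintptr_t f = MakeFailure(7);                  // 7 = some invented error kind
  std::printf("failure=%d info=%llu\n", IsFailure(f),
              static_cast<unsigned long long>(FailureInfo(f)));
  return 0;
}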
- WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_)); + NOBARRIER_WRITE_FIELD( + this, kMapOffset, reinterpret_cast<Object*>(map_word.value_)); +} + + +MapWord HeapObject::synchronized_map_word() { + return MapWord( + reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset))); +} + + +void HeapObject::synchronized_set_map_word(MapWord map_word) { + RELEASE_WRITE_FIELD( + this, kMapOffset, reinterpret_cast<Object*>(map_word.value_)); } @@ -1444,11 +1470,11 @@ FixedArrayBase* JSObject::elements() { } -void JSObject::ValidateElements() { +void JSObject::ValidateElements(Handle<JSObject> object) { #ifdef ENABLE_SLOW_ASSERTS if (FLAG_enable_slow_asserts) { - ElementsAccessor* accessor = GetElementsAccessor(); - accessor->Validate(this); + ElementsAccessor* accessor = object->GetElementsAccessor(); + accessor->Validate(object); } #endif } @@ -1588,7 +1614,7 @@ inline bool AllocationSite::DigestPretenuringFeedback() { void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) { - object->ValidateElements(); + JSObject::ValidateElements(object); ElementsKind elements_kind = object->map()->elements_kind(); if (!IsFastObjectElementsKind(elements_kind)) { if (IsFastHoleyElementsKind(elements_kind)) { @@ -1676,56 +1702,27 @@ void JSObject::EnsureCanContainElements(Handle<JSObject> object, } -MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate, - ElementsKind to_kind) { - Map* current_map = map(); - ElementsKind from_kind = current_map->elements_kind(); - if (from_kind == to_kind) return current_map; - - Context* native_context = isolate->context()->native_context(); - Object* maybe_array_maps = native_context->js_array_maps(); - if (maybe_array_maps->IsFixedArray()) { - FixedArray* array_maps = FixedArray::cast(maybe_array_maps); - if (array_maps->get(from_kind) == current_map) { - Object* maybe_transitioned_map = array_maps->get(to_kind); - if (maybe_transitioned_map->IsMap()) { - return Map::cast(maybe_transitioned_map); - } - } - } - - return GetElementsTransitionMapSlow(to_kind); +void JSObject::SetMapAndElements(Handle<JSObject> object, + Handle<Map> new_map, + Handle<FixedArrayBase> value) { + JSObject::MigrateToMap(object, new_map); + ASSERT((object->map()->has_fast_smi_or_object_elements() || + (*value == object->GetHeap()->empty_fixed_array())) == + (value->map() == object->GetHeap()->fixed_array_map() || + value->map() == object->GetHeap()->fixed_cow_array_map())); + ASSERT((*value == object->GetHeap()->empty_fixed_array()) || + (object->map()->has_fast_double_elements() == + value->IsFixedDoubleArray())); + object->set_elements(*value); } -void JSObject::set_map_and_elements(Map* new_map, - FixedArrayBase* value, - WriteBarrierMode mode) { - ASSERT(value->HasValidElements()); - if (new_map != NULL) { - if (mode == UPDATE_WRITE_BARRIER) { - set_map(new_map); - } else { - ASSERT(mode == SKIP_WRITE_BARRIER); - set_map_no_write_barrier(new_map); - } - } - ASSERT((map()->has_fast_smi_or_object_elements() || - (value == GetHeap()->empty_fixed_array())) == - (value->map() == GetHeap()->fixed_array_map() || - value->map() == GetHeap()->fixed_cow_array_map())); - ASSERT((value == GetHeap()->empty_fixed_array()) || - (map()->has_fast_double_elements() == value->IsFixedDoubleArray())); +void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) { WRITE_FIELD(this, kElementsOffset, value); CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode); } -void JSObject::set_elements(FixedArrayBase* value, 
WriteBarrierMode mode) { - set_map_and_elements(NULL, value, mode); -} - - void JSObject::initialize_properties() { ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array())); WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array()); @@ -1733,50 +1730,8 @@ void JSObject::initialize_properties() { void JSObject::initialize_elements() { - if (map()->has_fast_smi_or_object_elements() || - map()->has_fast_double_elements()) { - ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array())); - WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array()); - } else if (map()->has_external_array_elements()) { - ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(map()); - ASSERT(!GetHeap()->InNewSpace(empty_array)); - WRITE_FIELD(this, kElementsOffset, empty_array); - } else if (map()->has_fixed_typed_array_elements()) { - FixedTypedArrayBase* empty_array = - GetHeap()->EmptyFixedTypedArrayForMap(map()); - ASSERT(!GetHeap()->InNewSpace(empty_array)); - WRITE_FIELD(this, kElementsOffset, empty_array); - } else { - UNREACHABLE(); - } -} - - -MaybeObject* JSObject::ResetElements() { - if (map()->is_observed()) { - // Maintain invariant that observed elements are always in dictionary mode. - SeededNumberDictionary* dictionary; - MaybeObject* maybe = SeededNumberDictionary::Allocate(GetHeap(), 0); - if (!maybe->To(&dictionary)) return maybe; - if (map() == GetHeap()->sloppy_arguments_elements_map()) { - FixedArray::cast(elements())->set(1, dictionary); - } else { - set_elements(dictionary); - } - return this; - } - - ElementsKind elements_kind = GetInitialFastElementsKind(); - if (!FLAG_smi_only_arrays) { - elements_kind = FastSmiToObjectElementsKind(elements_kind); - } - MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind); - Map* map; - if (!maybe->To(&map)) return maybe; - set_map(map); - initialize_elements(); - - return this; + FixedArrayBase* elements = map()->GetInitialElements(); + WRITE_FIELD(this, kElementsOffset, elements); } @@ -1887,6 +1842,10 @@ int JSObject::GetHeaderSize() { return JSSet::kSize; case JS_MAP_TYPE: return JSMap::kSize; + case JS_SET_ITERATOR_TYPE: + return JSSetIterator::kSize; + case JS_MAP_ITERATOR_TYPE: + return JSMapIterator::kSize; case JS_WEAK_MAP_TYPE: return JSWeakMap::kSize; case JS_WEAK_SET_TYPE: @@ -1951,13 +1910,6 @@ void JSObject::SetInternalField(int index, Smi* value) { } -MaybeObject* JSObject::FastPropertyAt(Representation representation, - int index) { - Object* raw_value = RawFastPropertyAt(index); - return raw_value->AllocateNewStorageFor(GetHeap(), representation); -} - - // Access fast-case object properties at index. The use of these routines // is needed to correctly distinguish between properties stored in-object and // properties stored in the properties array. 
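Several accessors above stop operating on a raw this pointer and become static helpers taking handles: ValidateElements(Handle<JSObject>), SetMapAndElements(Handle<JSObject>, Handle<Map>, Handle<FixedArrayBase>), and NewStorageFor earlier in the file. The usual reason for this pattern is that these paths can allocate, and an allocation under a moving collector invalidates raw pointers, while a handle is one extra indirection through a slot the collector updates. The toy below demonstrates only that indirection; the "heap" that relocates an object is entirely made up.

// Why handles: a slot the collector can update when the object moves.
#include <cstdio>

struct Obj { int value; };

static Obj storage_a{41};
static Obj storage_b{0};
static Obj* slot = &storage_a;       // the slot a Handle points at

struct Handle {                      // toy handle: pointer to the slot, not the object
  Obj* operator->() const { return *location; }
  Obj** location;
};

void PretendGCMovesObject() {        // stands in for an allocation triggering GC
  storage_b = storage_a;             // copy the object to its new location
  slot = &storage_b;                 // update the slot; raw pointers go stale
}

int main() {
  Handle h{&slot};
  Obj* raw = &storage_a;             // raw pointer, as a non-handlified method would hold
  PretendGCMovesObject();
  h->value++;                        // still valid: reads through the updated slot
  std::printf("via handle: %d (raw pointer still sees the old copy: %d)\n",
              h->value, raw->value);
  return 0;
}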
@@ -2130,6 +2082,11 @@ Object* FixedArray::get(int index) { } +Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) { + return handle(array->get(index), array->GetIsolate()); +} + + bool FixedArray::is_the_hole(int index) { return get(index) == GetHeap()->the_hole_value(); } @@ -2186,20 +2143,13 @@ int64_t FixedDoubleArray::get_representation(int index) { return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize); } -MaybeObject* FixedDoubleArray::get(int index) { - if (is_the_hole(index)) { - return GetHeap()->the_hole_value(); - } else { - return GetHeap()->NumberFromDouble(get_scalar(index)); - } -} - -Handle<Object> FixedDoubleArray::get_as_handle(int index) { - if (is_the_hole(index)) { - return GetIsolate()->factory()->the_hole_value(); +Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array, + int index) { + if (array->is_the_hole(index)) { + return array->GetIsolate()->factory()->the_hole_value(); } else { - return GetIsolate()->factory()->NewNumber(get_scalar(index)); + return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index)); } } @@ -2227,12 +2177,30 @@ bool FixedDoubleArray::is_the_hole(int index) { } -SMI_ACCESSORS( - ConstantPoolArray, first_code_ptr_index, kFirstCodePointerIndexOffset) -SMI_ACCESSORS( - ConstantPoolArray, first_heap_ptr_index, kFirstHeapPointerIndexOffset) -SMI_ACCESSORS( - ConstantPoolArray, first_int32_index, kFirstInt32IndexOffset) +double* FixedDoubleArray::data_start() { + return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize)); +} + + +void FixedDoubleArray::FillWithHoles(int from, int to) { + for (int i = from; i < to; i++) { + set_the_hole(i); + } +} + + +void ConstantPoolArray::set_weak_object_state( + ConstantPoolArray::WeakObjectState state) { + int old_layout_field = READ_INT_FIELD(this, kArrayLayoutOffset); + int new_layout_field = WeakObjectStateField::update(old_layout_field, state); + WRITE_INT_FIELD(this, kArrayLayoutOffset, new_layout_field); +} + + +ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() { + int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset); + return WeakObjectStateField::decode(layout_field); +} int ConstantPoolArray::first_int64_index() { @@ -2240,6 +2208,27 @@ int ConstantPoolArray::first_int64_index() { } +int ConstantPoolArray::first_code_ptr_index() { + int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset); + return first_int64_index() + + NumberOfInt64EntriesField::decode(layout_field); +} + + +int ConstantPoolArray::first_heap_ptr_index() { + int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset); + return first_code_ptr_index() + + NumberOfCodePtrEntriesField::decode(layout_field); +} + + +int ConstantPoolArray::first_int32_index() { + int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset); + return first_heap_ptr_index() + + NumberOfHeapPtrEntriesField::decode(layout_field); +} + + int ConstantPoolArray::count_of_int64_entries() { return first_code_ptr_index(); } @@ -2260,18 +2249,20 @@ int ConstantPoolArray::count_of_int32_entries() { } -void ConstantPoolArray::SetEntryCounts(int number_of_int64_entries, - int number_of_code_ptr_entries, - int number_of_heap_ptr_entries, - int number_of_int32_entries) { - int current_index = number_of_int64_entries; - set_first_code_ptr_index(current_index); - current_index += number_of_code_ptr_entries; - set_first_heap_ptr_index(current_index); - current_index += number_of_heap_ptr_entries; - set_first_int32_index(current_index); - current_index += 
number_of_int32_entries; - set_length(current_index); +void ConstantPoolArray::Init(int number_of_int64_entries, + int number_of_code_ptr_entries, + int number_of_heap_ptr_entries, + int number_of_int32_entries) { + set_length(number_of_int64_entries + + number_of_code_ptr_entries + + number_of_heap_ptr_entries + + number_of_int32_entries); + int layout_field = + NumberOfInt64EntriesField::encode(number_of_int64_entries) | + NumberOfCodePtrEntriesField::encode(number_of_code_ptr_entries) | + NumberOfHeapPtrEntriesField::encode(number_of_heap_ptr_entries) | + WeakObjectStateField::encode(NO_WEAK_OBJECTS); + WRITE_INT_FIELD(this, kArrayLayoutOffset, layout_field); } @@ -2420,8 +2411,10 @@ void FixedArray::set_the_hole(int index) { } -double* FixedDoubleArray::data_start() { - return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize)); +void FixedArray::FillWithHoles(int from, int to) { + for (int i = from; i < to; i++) { + set_the_hole(i); + } } @@ -2570,15 +2563,30 @@ void Map::LookupDescriptor(JSObject* holder, void Map::LookupTransition(JSObject* holder, Name* name, LookupResult* result) { - if (HasTransitionArray()) { - TransitionArray* transition_array = transitions(); - int number = transition_array->Search(name); - if (number != TransitionArray::kNotFound) { - return result->TransitionResult( - holder, transition_array->GetTarget(number)); - } + int transition_index = this->SearchTransition(name); + if (transition_index == TransitionArray::kNotFound) return result->NotFound(); + result->TransitionResult(holder, this->GetTransition(transition_index)); +} + + +FixedArrayBase* Map::GetInitialElements() { + if (has_fast_smi_or_object_elements() || + has_fast_double_elements()) { + ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array())); + return GetHeap()->empty_fixed_array(); + } else if (has_external_array_elements()) { + ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this); + ASSERT(!GetHeap()->InNewSpace(empty_array)); + return empty_array; + } else if (has_fixed_typed_array_elements()) { + FixedTypedArrayBase* empty_array = + GetHeap()->EmptyFixedTypedArrayForMap(this); + ASSERT(!GetHeap()->InNewSpace(empty_array)); + return empty_array; + } else { + UNREACHABLE(); } - result->NotFound(); + return NULL; } @@ -2629,14 +2637,6 @@ void DescriptorArray::SetRepresentation(int descriptor_index, } -void DescriptorArray::InitializeRepresentations(Representation representation) { - int length = number_of_descriptors(); - for (int i = 0; i < length; i++) { - SetRepresentation(i, representation); - } -} - - Object** DescriptorArray::GetValueSlot(int descriptor_number) { ASSERT(descriptor_number < number_of_descriptors()); return RawFieldOfElementAt(ToValueIndex(descriptor_number)); @@ -2649,6 +2649,11 @@ Object* DescriptorArray::GetValue(int descriptor_number) { } +void DescriptorArray::SetValue(int descriptor_index, Object* value) { + set(ToValueIndex(descriptor_index), value); +} + + PropertyDetails DescriptorArray::GetDetails(int descriptor_number) { ASSERT(descriptor_number < number_of_descriptors()); Object* details = get(ToDetailsIndex(descriptor_number)); @@ -2667,6 +2672,12 @@ int DescriptorArray::GetFieldIndex(int descriptor_number) { } +HeapType* DescriptorArray::GetFieldType(int descriptor_number) { + ASSERT(GetDetails(descriptor_number).type() == FIELD); + return HeapType::cast(GetValue(descriptor_number)); +} + + Object* DescriptorArray::GetConstant(int descriptor_number) { return GetValue(descriptor_number); } @@ -2686,8 +2697,8 @@ AccessorDescriptor* 
DescriptorArray::GetCallbacks(int descriptor_number) { void DescriptorArray::Get(int descriptor_number, Descriptor* desc) { - desc->Init(GetKey(descriptor_number), - GetValue(descriptor_number), + desc->Init(handle(GetKey(descriptor_number), GetIsolate()), + handle(GetValue(descriptor_number), GetIsolate()), GetDetails(descriptor_number)); } @@ -2700,10 +2711,10 @@ void DescriptorArray::Set(int descriptor_number, NoIncrementalWriteBarrierSet(this, ToKeyIndex(descriptor_number), - desc->GetKey()); + *desc->GetKey()); NoIncrementalWriteBarrierSet(this, ToValueIndex(descriptor_number), - desc->GetValue()); + *desc->GetValue()); NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi()); @@ -2714,14 +2725,15 @@ void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { // Range check. ASSERT(descriptor_number < number_of_descriptors()); - set(ToKeyIndex(descriptor_number), desc->GetKey()); - set(ToValueIndex(descriptor_number), desc->GetValue()); + set(ToKeyIndex(descriptor_number), *desc->GetKey()); + set(ToValueIndex(descriptor_number), *desc->GetValue()); set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi()); } void DescriptorArray::Append(Descriptor* desc, const WhitenessWitness& witness) { + DisallowHeapAllocation no_gc; int descriptor_number = number_of_descriptors(); SetNumberOfDescriptors(descriptor_number + 1); Set(descriptor_number, desc, witness); @@ -2741,6 +2753,7 @@ void DescriptorArray::Append(Descriptor* desc, void DescriptorArray::Append(Descriptor* desc) { + DisallowHeapAllocation no_gc; int descriptor_number = number_of_descriptors(); SetNumberOfDescriptors(descriptor_number + 1); Set(descriptor_number, desc); @@ -2766,7 +2779,7 @@ void DescriptorArray::SwapSortedKeys(int first, int second) { } -DescriptorArray::WhitenessWitness::WhitenessWitness(FixedArray* array) +DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array) : marking_(array->GetHeap()->incremental_marking()) { marking_->EnterNoMarkingScope(); ASSERT(!marking_->IsMarking() || @@ -2779,8 +2792,8 @@ DescriptorArray::WhitenessWitness::~WhitenessWitness() { } -template<typename Shape, typename Key> -int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) { +template<typename Derived, typename Shape, typename Key> +int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) { const int kMinCapacity = 32; int capacity = RoundUpToPowerOf2(at_least_space_for * 2); if (capacity < kMinCapacity) { @@ -2790,17 +2803,17 @@ int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) { } -template<typename Shape, typename Key> -int HashTable<Shape, Key>::FindEntry(Key key) { +template<typename Derived, typename Shape, typename Key> +int HashTable<Derived, Shape, Key>::FindEntry(Key key) { return FindEntry(GetIsolate(), key); } // Find entry for key otherwise return kNotFound. -template<typename Shape, typename Key> -int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) { +template<typename Derived, typename Shape, typename Key> +int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) { uint32_t capacity = Capacity(); - uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity); + uint32_t entry = FirstProbe(HashTable::Hash(key), capacity); uint32_t count = 1; // EnsureCapacity will guarantee the hash table is never full. 
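The FindEntry loop that continues below is a plain open-addressing lookup: ComputeCapacity above rounds the size up to a power of two, so each probe can wrap with a bitmask, and because the table is never allowed to become completely full the scan is guaranteed to reach either the key or an empty slot. A self-contained sketch of that lookup strategy, with invented names rather than the real HashTable/Shape templates:

#include <cstdint>
#include <string>
#include <vector>

// Simplified open-addressing table. Capacity is always a power of two, so
// "hash & (capacity - 1)" wraps the probe sequence without a division.
class ProbeTable {
 public:
  static const int kNotFound = -1;

  explicit ProbeTable(uint32_t at_least_space_for) {
    uint32_t capacity = 32;                              // minimum capacity
    while (capacity < at_least_space_for * 2) capacity <<= 1;
    keys_.assign(capacity, std::string());
    used_.assign(capacity, false);
  }

  // As long as the table is never completely full (the real code grows it
  // before that point), both loops below terminate.
  void Insert(const std::string& key) {
    uint32_t count = 1;
    for (uint32_t entry = FirstProbe(Hash(key));; entry = NextProbe(entry, count++)) {
      if (!used_[entry] || keys_[entry] == key) {
        keys_[entry] = key;
        used_[entry] = true;
        return;
      }
    }
  }

  int FindEntry(const std::string& key) const {
    uint32_t count = 1;
    for (uint32_t entry = FirstProbe(Hash(key));; entry = NextProbe(entry, count++)) {
      if (!used_[entry]) return kNotFound;               // hit a hole: key is absent
      if (keys_[entry] == key) return static_cast<int>(entry);
    }
  }

 private:
  uint32_t Mask() const { return static_cast<uint32_t>(keys_.size()) - 1; }
  uint32_t FirstProbe(uint32_t hash) const { return hash & Mask(); }
  // Triangular-number probing (offsets 1, 3, 6, ...) visits every slot when
  // the capacity is a power of two.
  uint32_t NextProbe(uint32_t last, uint32_t count) const {
    return (last + count) & Mask();
  }
  static uint32_t Hash(const std::string& key) {
    uint32_t h = 2166136261u;                            // FNV-1a
    for (char c : key) { h ^= static_cast<uint8_t>(c); h *= 16777619u; }
    return h;
  }

  std::vector<std::string> keys_;
  std::vector<bool> used_;
};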
while (true) { @@ -2893,6 +2906,8 @@ CAST_ACCESSOR(JSProxy) CAST_ACCESSOR(JSFunctionProxy) CAST_ACCESSOR(JSSet) CAST_ACCESSOR(JSMap) +CAST_ACCESSOR(JSSetIterator) +CAST_ACCESSOR(JSMapIterator) CAST_ACCESSOR(JSWeakMap) CAST_ACCESSOR(JSWeakSet) CAST_ACCESSOR(Foreign) @@ -2925,17 +2940,22 @@ FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) { #undef MAKE_STRUCT_CAST -template <typename Shape, typename Key> -HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) { +template <typename Derived, typename Shape, typename Key> +HashTable<Derived, Shape, Key>* +HashTable<Derived, Shape, Key>::cast(Object* obj) { ASSERT(obj->IsHashTable()); return reinterpret_cast<HashTable*>(obj); } SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset) +SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset) + SMI_ACCESSORS(FreeSpace, size, kSizeOffset) +NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset) SMI_ACCESSORS(String, length, kLengthOffset) +SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset) uint32_t Name::hash_field() { @@ -2961,6 +2981,17 @@ bool Name::Equals(Name* other) { } +bool Name::Equals(Handle<Name> one, Handle<Name> two) { + if (one.is_identical_to(two)) return true; + if ((one->IsInternalizedString() && two->IsInternalizedString()) || + one->IsSymbol() || two->IsSymbol()) { + return false; + } + return String::SlowEquals(Handle<String>::cast(one), + Handle<String>::cast(two)); +} + + ACCESSORS(Symbol, name, Object, kNameOffset) ACCESSORS(Symbol, flags, Smi, kFlagsOffset) BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit) @@ -2975,19 +3006,20 @@ bool String::Equals(String* other) { } -MaybeObject* String::TryFlatten(PretenureFlag pretenure) { - if (!StringShape(this).IsCons()) return this; - ConsString* cons = ConsString::cast(this); - if (cons->IsFlat()) return cons->first(); - return SlowTryFlatten(pretenure); +bool String::Equals(Handle<String> one, Handle<String> two) { + if (one.is_identical_to(two)) return true; + if (one->IsInternalizedString() && two->IsInternalizedString()) { + return false; + } + return SlowEquals(one, two); } -String* String::TryFlattenGetString(PretenureFlag pretenure) { - MaybeObject* flat = TryFlatten(pretenure); - Object* successfully_flattened; - if (!flat->ToObject(&successfully_flattened)) return this; - return String::cast(successfully_flattened); +Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) { + if (!string->IsConsString()) return string; + Handle<ConsString> cons = Handle<ConsString>::cast(string); + if (cons->IsFlat()) return handle(cons->first()); + return SlowFlatten(cons, pretenure); } @@ -3044,96 +3076,60 @@ String* String::GetUnderlying() { } -template<class Visitor, class ConsOp> -void String::Visit( - String* string, - unsigned offset, - Visitor& visitor, - ConsOp& cons_op, - int32_t type, - unsigned length) { - ASSERT(length == static_cast<unsigned>(string->length())); +template<class Visitor> +ConsString* String::VisitFlat(Visitor* visitor, + String* string, + const int offset) { + int slice_offset = offset; + const int length = string->length(); ASSERT(offset <= length); - unsigned slice_offset = offset; while (true) { - ASSERT(type == string->map()->instance_type()); - + int32_t type = string->map()->instance_type(); switch (type & (kStringRepresentationMask | kStringEncodingMask)) { case kSeqStringTag | kOneByteStringTag: - visitor.VisitOneByteString( + visitor->VisitOneByteString( SeqOneByteString::cast(string)->GetChars() + slice_offset, length - offset); - 
return; + return NULL; case kSeqStringTag | kTwoByteStringTag: - visitor.VisitTwoByteString( + visitor->VisitTwoByteString( SeqTwoByteString::cast(string)->GetChars() + slice_offset, length - offset); - return; + return NULL; case kExternalStringTag | kOneByteStringTag: - visitor.VisitOneByteString( + visitor->VisitOneByteString( ExternalAsciiString::cast(string)->GetChars() + slice_offset, length - offset); - return; + return NULL; case kExternalStringTag | kTwoByteStringTag: - visitor.VisitTwoByteString( + visitor->VisitTwoByteString( ExternalTwoByteString::cast(string)->GetChars() + slice_offset, length - offset); - return; + return NULL; case kSlicedStringTag | kOneByteStringTag: case kSlicedStringTag | kTwoByteStringTag: { SlicedString* slicedString = SlicedString::cast(string); slice_offset += slicedString->offset(); string = slicedString->parent(); - type = string->map()->instance_type(); continue; } case kConsStringTag | kOneByteStringTag: case kConsStringTag | kTwoByteStringTag: - string = cons_op.Operate(string, &offset, &type, &length); - if (string == NULL) return; - slice_offset = offset; - ASSERT(length == static_cast<unsigned>(string->length())); - continue; + return ConsString::cast(string); default: UNREACHABLE(); - return; + return NULL; } } } -// TODO(dcarney): Remove this class after conversion to VisitFlat. -class ConsStringCaptureOp { - public: - inline ConsStringCaptureOp() : cons_string_(NULL) {} - inline String* Operate(String* string, unsigned*, int32_t*, unsigned*) { - cons_string_ = ConsString::cast(string); - return NULL; - } - ConsString* cons_string_; -}; - - -template<class Visitor> -ConsString* String::VisitFlat(Visitor* visitor, - String* string, - int offset, - int length, - int32_t type) { - ASSERT(length >= 0 && length == string->length()); - ASSERT(offset >= 0 && offset <= length); - ConsStringCaptureOp op; - Visit(string, offset, *visitor, op, type, static_cast<unsigned>(length)); - return op.cons_string_; -} - - uint16_t SeqOneByteString::SeqOneByteStringGet(int index) { ASSERT(index >= 0 && index < length()); return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize); @@ -3313,12 +3309,7 @@ const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData( } -String* ConsStringNullOp::Operate(String*, unsigned*, int32_t*, unsigned*) { - return NULL; -} - - -unsigned ConsStringIteratorOp::OffsetForDepth(unsigned depth) { +int ConsStringIteratorOp::OffsetForDepth(int depth) { return depth & kDepthMask; } @@ -3346,45 +3337,9 @@ void ConsStringIteratorOp::Pop() { } -bool ConsStringIteratorOp::HasMore() { - return depth_ != 0; -} - - -void ConsStringIteratorOp::Reset() { - depth_ = 0; -} - - -String* ConsStringIteratorOp::ContinueOperation(int32_t* type_out, - unsigned* length_out) { - bool blew_stack = false; - String* string = NextLeaf(&blew_stack, type_out, length_out); - // String found. - if (string != NULL) { - // Verify output. - ASSERT(*length_out == static_cast<unsigned>(string->length())); - ASSERT(*type_out == string->map()->instance_type()); - return string; - } - // Traversal complete. - if (!blew_stack) return NULL; - // Restart search from root. - unsigned offset_out; - string = Search(&offset_out, type_out, length_out); - // Verify output. 
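The VisitFlat switch above walks a string by its representation tag without recursion: sequential and external strings are handed straight to the visitor, sliced strings only contribute an extra offset, and the first cons string encountered is returned to the caller so the iterator operator can take over from there. A toy version of that dispatch, using an invented Str type (and assuming, as the real code does, that a slice's parent is already flat):

#include <cstddef>
#include <cstdio>
#include <string>

// Toy string representations standing in for flat (seq/external), sliced and
// cons strings; illustrative only, not V8's class hierarchy.
struct Str {
  enum Kind { kFlat, kSliced, kCons } kind;
  std::size_t length;            // logical length of this string
  std::string chars;             // kFlat: backing characters
  const Str* parent = nullptr;   // kSliced: underlying string
  std::size_t slice_offset = 0;  // kSliced: start position within parent
  const Str* first = nullptr;    // kCons: left child (right child omitted)
};

struct SegmentPrinter {
  void VisitFlat(const char* chars, std::size_t length) {
    std::printf("flat segment: %.*s\n", static_cast<int>(length), chars);
  }
};

// Iterative walk: slices only add to the running offset, a flat string is
// handed to the visitor, and the first cons string encountered is returned
// unvisited so a separate iterator can descend into its children.
const Str* VisitFlatPrefix(SegmentPrinter* visitor, const Str* string,
                           std::size_t offset) {
  std::size_t slice_offset = offset;
  const std::size_t length = string->length;  // length of the string we started from
  while (true) {
    switch (string->kind) {
      case Str::kFlat:
        visitor->VisitFlat(string->chars.data() + slice_offset, length - offset);
        return nullptr;                       // fully flat: nothing left over
      case Str::kSliced:
        slice_offset += string->slice_offset;
        string = string->parent;
        continue;
      case Str::kCons:
        return string;                        // caller resumes from here
    }
  }
}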
- ASSERT(string == NULL || offset_out == 0); - ASSERT(string == NULL || - *length_out == static_cast<unsigned>(string->length())); - ASSERT(string == NULL || *type_out == string->map()->instance_type()); - return string; -} - - uint16_t StringCharacterStream::GetNext() { ASSERT(buffer8_ != NULL && end_ != NULL); // Advance cursor if needed. - // TODO(dcarney): Ensure uses of the api call HasMore first and avoid this. if (buffer8_ == end_) HasMore(); ASSERT(buffer8_ < end_); return is_one_byte_ ? *buffer8_++ : *buffer16_++; @@ -3393,41 +3348,39 @@ uint16_t StringCharacterStream::GetNext() { StringCharacterStream::StringCharacterStream(String* string, ConsStringIteratorOp* op, - unsigned offset) + int offset) : is_one_byte_(false), op_(op) { Reset(string, offset); } -void StringCharacterStream::Reset(String* string, unsigned offset) { - op_->Reset(); +void StringCharacterStream::Reset(String* string, int offset) { buffer8_ = NULL; end_ = NULL; - int32_t type = string->map()->instance_type(); - unsigned length = string->length(); - String::Visit(string, offset, *this, *op_, type, length); + ConsString* cons_string = String::VisitFlat(this, string, offset); + op_->Reset(cons_string, offset); + if (cons_string != NULL) { + string = op_->Next(&offset); + if (string != NULL) String::VisitFlat(this, string, offset); + } } bool StringCharacterStream::HasMore() { if (buffer8_ != end_) return true; - if (!op_->HasMore()) return false; - unsigned length; - int32_t type; - String* string = op_->ContinueOperation(&type, &length); + int offset; + String* string = op_->Next(&offset); + ASSERT_EQ(offset, 0); if (string == NULL) return false; - ASSERT(!string->IsConsString()); - ASSERT(string->length() != 0); - ConsStringNullOp null_op; - String::Visit(string, 0, *this, null_op, type, length); + String::VisitFlat(this, string); ASSERT(buffer8_ != end_); return true; } void StringCharacterStream::VisitOneByteString( - const uint8_t* chars, unsigned length) { + const uint8_t* chars, int length) { is_one_byte_ = true; buffer8_ = chars; end_ = chars + length; @@ -3435,7 +3388,7 @@ void StringCharacterStream::VisitOneByteString( void StringCharacterStream::VisitTwoByteString( - const uint16_t* chars, unsigned length) { + const uint16_t* chars, int length) { is_one_byte_ = false; buffer16_ = chars; end_ = reinterpret_cast<const uint8_t*>(chars + length); @@ -3519,8 +3472,11 @@ uint8_t ExternalUint8ClampedArray::get_scalar(int index) { } -MaybeObject* ExternalUint8ClampedArray::get(int index) { - return Smi::FromInt(static_cast<int>(get_scalar(index))); +Handle<Object> ExternalUint8ClampedArray::get( + Handle<ExternalUint8ClampedArray> array, + int index) { + return Handle<Smi>(Smi::FromInt(array->get_scalar(index)), + array->GetIsolate()); } @@ -3550,8 +3506,10 @@ int8_t ExternalInt8Array::get_scalar(int index) { } -MaybeObject* ExternalInt8Array::get(int index) { - return Smi::FromInt(static_cast<int>(get_scalar(index))); +Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array, + int index) { + return Handle<Smi>(Smi::FromInt(array->get_scalar(index)), + array->GetIsolate()); } @@ -3569,8 +3527,10 @@ uint8_t ExternalUint8Array::get_scalar(int index) { } -MaybeObject* ExternalUint8Array::get(int index) { - return Smi::FromInt(static_cast<int>(get_scalar(index))); +Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array, + int index) { + return Handle<Smi>(Smi::FromInt(array->get_scalar(index)), + array->GetIsolate()); } @@ -3588,8 +3548,10 @@ int16_t 
ExternalInt16Array::get_scalar(int index) { } -MaybeObject* ExternalInt16Array::get(int index) { - return Smi::FromInt(static_cast<int>(get_scalar(index))); +Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array, + int index) { + return Handle<Smi>(Smi::FromInt(array->get_scalar(index)), + array->GetIsolate()); } @@ -3607,8 +3569,10 @@ uint16_t ExternalUint16Array::get_scalar(int index) { } -MaybeObject* ExternalUint16Array::get(int index) { - return Smi::FromInt(static_cast<int>(get_scalar(index))); +Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array, + int index) { + return Handle<Smi>(Smi::FromInt(array->get_scalar(index)), + array->GetIsolate()); } @@ -3626,8 +3590,10 @@ int32_t ExternalInt32Array::get_scalar(int index) { } -MaybeObject* ExternalInt32Array::get(int index) { - return GetHeap()->NumberFromInt32(get_scalar(index)); +Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array, + int index) { + return array->GetIsolate()->factory()-> + NewNumberFromInt(array->get_scalar(index)); } @@ -3645,8 +3611,10 @@ uint32_t ExternalUint32Array::get_scalar(int index) { } -MaybeObject* ExternalUint32Array::get(int index) { - return GetHeap()->NumberFromUint32(get_scalar(index)); +Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array, + int index) { + return array->GetIsolate()->factory()-> + NewNumberFromUint(array->get_scalar(index)); } @@ -3664,8 +3632,9 @@ float ExternalFloat32Array::get_scalar(int index) { } -MaybeObject* ExternalFloat32Array::get(int index) { - return GetHeap()->NumberFromDouble(get_scalar(index)); +Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array, + int index) { + return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index)); } @@ -3683,8 +3652,9 @@ double ExternalFloat64Array::get_scalar(int index) { } -MaybeObject* ExternalFloat64Array::get(int index) { - return GetHeap()->NumberFromDouble(get_scalar(index)); +Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array, + int index) { + return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index)); } @@ -3829,83 +3799,80 @@ double FixedTypedArray<Float64ArrayTraits>::from_double(double value) { template <class Traits> -MaybeObject* FixedTypedArray<Traits>::get(int index) { - return Traits::ToObject(GetHeap(), get_scalar(index)); +Handle<Object> FixedTypedArray<Traits>::get( + Handle<FixedTypedArray<Traits> > array, + int index) { + return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index)); } + template <class Traits> -MaybeObject* FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) { +Handle<Object> FixedTypedArray<Traits>::SetValue( + Handle<FixedTypedArray<Traits> > array, + uint32_t index, + Handle<Object> value) { ElementType cast_value = Traits::defaultValue(); - if (index < static_cast<uint32_t>(length())) { + if (index < static_cast<uint32_t>(array->length())) { if (value->IsSmi()) { - int int_value = Smi::cast(value)->value(); + int int_value = Handle<Smi>::cast(value)->value(); cast_value = from_int(int_value); } else if (value->IsHeapNumber()) { - double double_value = HeapNumber::cast(value)->value(); + double double_value = Handle<HeapNumber>::cast(value)->value(); cast_value = from_double(double_value); } else { // Clamp undefined to the default value. All other types have been // converted to a number type further up in the call chain. 
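The external-array getters and the FixedTypedArray SetValue path above funnel every store through per-element-type helpers: a default value for undefined, and int/double narrowing for everything else. A standalone sketch of that traits arrangement with invented trait names, glossing over the exact rounding and wrapping rules:

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <vector>

// Per-element-type policy: the element type, the value stored for undefined,
// and how a double is narrowed into the element type.
struct Uint8ClampedTraits {
  typedef uint8_t ElementType;
  static ElementType defaultValue() { return 0; }
  static ElementType FromDouble(double v) {
    if (std::isnan(v)) return 0;
    double clamped = std::min(255.0, std::max(0.0, v));
    return static_cast<ElementType>(clamped + 0.5);  // simple round-half-up
  }
};

struct Int16Traits {
  typedef int16_t ElementType;
  static ElementType defaultValue() { return 0; }
  static ElementType FromDouble(double v) {
    if (!std::isfinite(v)) return 0;
    // Wrap modulo 2^16 like an ordinary integer-typed array (simplified).
    double wrapped = std::fmod(std::trunc(v), 65536.0);
    if (wrapped < 0) wrapped += 65536.0;
    return static_cast<ElementType>(static_cast<uint16_t>(wrapped));
  }
};

template <class Traits>
class SimpleTypedArray {
 public:
  explicit SimpleTypedArray(std::size_t length)
      : elements_(length, Traits::defaultValue()) {}

  // 'has_value' stands in for the "value is not undefined" check; undefined
  // stores the trait's default value, everything else is narrowed.
  void SetValue(std::size_t index, double value, bool has_value) {
    if (index >= elements_.size()) return;           // ignore out-of-range writes
    elements_[index] = has_value ? Traits::FromDouble(value)
                                 : Traits::defaultValue();
  }

  typename Traits::ElementType get(std::size_t index) const { return elements_[index]; }

 private:
  std::vector<typename Traits::ElementType> elements_;
};

// Example: SimpleTypedArray<Uint8ClampedTraits> pixels(4);
//          pixels.SetValue(0, 300.0, true);   // clamps to 255
//          pixels.SetValue(1, 10.0, false);   // "undefined": stores the default 0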
ASSERT(value->IsUndefined()); } - set(index, cast_value); + array->set(index, cast_value); } - return Traits::ToObject(GetHeap(), cast_value); -} - -template <class Traits> -Handle<Object> FixedTypedArray<Traits>::SetValue( - Handle<FixedTypedArray<Traits> > array, - uint32_t index, - Handle<Object> value) { - CALL_HEAP_FUNCTION(array->GetIsolate(), - array->SetValue(index, *value), - Object); + return Traits::ToHandle(array->GetIsolate(), cast_value); } -MaybeObject* Uint8ArrayTraits::ToObject(Heap*, uint8_t scalar) { - return Smi::FromInt(scalar); +Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) { + return handle(Smi::FromInt(scalar), isolate); } -MaybeObject* Uint8ClampedArrayTraits::ToObject(Heap*, uint8_t scalar) { - return Smi::FromInt(scalar); +Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate, + uint8_t scalar) { + return handle(Smi::FromInt(scalar), isolate); } -MaybeObject* Int8ArrayTraits::ToObject(Heap*, int8_t scalar) { - return Smi::FromInt(scalar); +Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) { + return handle(Smi::FromInt(scalar), isolate); } -MaybeObject* Uint16ArrayTraits::ToObject(Heap*, uint16_t scalar) { - return Smi::FromInt(scalar); +Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) { + return handle(Smi::FromInt(scalar), isolate); } -MaybeObject* Int16ArrayTraits::ToObject(Heap*, int16_t scalar) { - return Smi::FromInt(scalar); +Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) { + return handle(Smi::FromInt(scalar), isolate); } -MaybeObject* Uint32ArrayTraits::ToObject(Heap* heap, uint32_t scalar) { - return heap->NumberFromUint32(scalar); +Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) { + return isolate->factory()->NewNumberFromUint(scalar); } -MaybeObject* Int32ArrayTraits::ToObject(Heap* heap, int32_t scalar) { - return heap->NumberFromInt32(scalar); +Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) { + return isolate->factory()->NewNumberFromInt(scalar); } -MaybeObject* Float32ArrayTraits::ToObject(Heap* heap, float scalar) { - return heap->NumberFromDouble(scalar); +Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) { + return isolate->factory()->NewNumber(scalar); } -MaybeObject* Float64ArrayTraits::ToObject(Heap* heap, double scalar) { - return heap->NumberFromDouble(scalar); +Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) { + return isolate->factory()->NewNumber(scalar); } @@ -3921,7 +3888,8 @@ void Map::set_visitor_id(int id) { int Map::instance_size() { - return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2; + return NOBARRIER_READ_BYTE_FIELD( + this, kInstanceSizeOffset) << kPointerSizeLog2; } @@ -3960,7 +3928,7 @@ int HeapObject::SizeFromMap(Map* map) { return reinterpret_cast<ByteArray*>(this)->ByteArraySize(); } if (instance_type == FREE_SPACE_TYPE) { - return reinterpret_cast<FreeSpace*>(this)->size(); + return reinterpret_cast<FreeSpace*>(this)->nobarrier_size(); } if (instance_type == STRING_TYPE || instance_type == INTERNALIZED_STRING_TYPE) { @@ -3991,7 +3959,8 @@ void Map::set_instance_size(int value) { ASSERT_EQ(0, value & (kPointerSize - 1)); value >>= kPointerSizeLog2; ASSERT(0 <= value && value < 256); - WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value)); + NOBARRIER_WRITE_BYTE_FIELD( + this, kInstanceSizeOffset, static_cast<byte>(value)); } @@ -4377,6 +4346,7 @@ bool 
Code::has_major_key() { kind() == LOAD_IC || kind() == KEYED_LOAD_IC || kind() == STORE_IC || + kind() == CALL_IC || kind() == KEYED_STORE_IC || kind() == TO_BOOLEAN_IC; } @@ -4559,12 +4529,41 @@ bool Code::marked_for_deoptimization() { void Code::set_marked_for_deoptimization(bool flag) { ASSERT(kind() == OPTIMIZED_FUNCTION); + ASSERT(!flag || AllowDeoptimization::IsAllowed(GetIsolate())); int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); int updated = MarkedForDeoptimizationField::update(previous, flag); WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); } +bool Code::is_weak_stub() { + return CanBeWeakStub() && WeakStubField::decode( + READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); +} + + +void Code::mark_as_weak_stub() { + ASSERT(CanBeWeakStub()); + int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); + int updated = WeakStubField::update(previous, true); + WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); +} + + +bool Code::is_invalidated_weak_stub() { + return is_weak_stub() && InvalidatedWeakStubField::decode( + READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); +} + + +void Code::mark_as_invalidated_weak_stub() { + ASSERT(is_inline_cache_stub()); + int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); + int updated = InvalidatedWeakStubField::update(previous, true); + WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); +} + + bool Code::is_inline_cache_stub() { Kind kind = this->kind(); switch (kind) { @@ -4677,10 +4676,9 @@ Object* Code::GetObjectFromEntryAddress(Address location_of_address) { bool Code::IsWeakObjectInOptimizedCode(Object* object) { - ASSERT(is_optimized_code()); + if (!FLAG_collect_maps) return false; if (object->IsMap()) { return Map::cast(object)->CanTransition() && - FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code; } if (object->IsJSObject() || @@ -4709,6 +4707,13 @@ class Code::FindAndReplacePattern { }; +bool Code::IsWeakObjectInIC(Object* object) { + return object->IsMap() && Map::cast(object)->CanTransition() && + FLAG_collect_maps && + FLAG_weak_embedded_maps_in_ic; +} + + Object* Map::prototype() { return READ_FIELD(this, kPrototypeOffset); } @@ -4723,21 +4728,17 @@ void Map::set_prototype(Object* value, WriteBarrierMode mode) { // If the descriptor is using the empty transition array, install a new empty // transition array that will have place for an element transition. 
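The deoptimization and weak-stub flags above all live in one packed 32-bit kind-specific field and are read and rewritten through BitField-style decode/update helpers. A minimal standalone version of that encode/decode pattern (the field names here are invented for illustration):

#include <cassert>
#include <cstdint>

// Each field owns 'size' bits starting at 'shift' within one uint32_t word.
template <class T, int shift, int size>
struct PackedField {
  static const uint32_t kMask = ((1u << size) - 1u) << shift;

  static uint32_t encode(T value) {
    assert((static_cast<uint32_t>(value) & ~((1u << size) - 1u)) == 0);
    return static_cast<uint32_t>(value) << shift;
  }
  static T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> shift);
  }
  // Replace just this field, leaving the other packed flags untouched.
  static uint32_t update(uint32_t previous, T value) {
    return (previous & ~kMask) | encode(value);
  }
};

// One possible layout for a flags word.
typedef PackedField<bool, 0, 1> MarkedForDeoptFieldExample;
typedef PackedField<bool, 1, 1> WeakStubFieldExample;
typedef PackedField<uint32_t, 2, 5> MajorKeyFieldExample;

// Usage: flags = MarkedForDeoptFieldExample::update(flags, true);
//        bool weak = WeakStubFieldExample::decode(flags);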
-static MaybeObject* EnsureHasTransitionArray(Map* map) { - TransitionArray* transitions; - MaybeObject* maybe_transitions; +static void EnsureHasTransitionArray(Handle<Map> map) { + Handle<TransitionArray> transitions; if (!map->HasTransitionArray()) { - maybe_transitions = TransitionArray::Allocate(map->GetIsolate(), 0); - if (!maybe_transitions->To(&transitions)) return maybe_transitions; + transitions = TransitionArray::Allocate(map->GetIsolate(), 0); transitions->set_back_pointer_storage(map->GetBackPointer()); } else if (!map->transitions()->IsFullTransitionArray()) { - maybe_transitions = map->transitions()->ExtendToFullTransitionArray(); - if (!maybe_transitions->To(&transitions)) return maybe_transitions; + transitions = TransitionArray::ExtendToFullTransitionArray(map); } else { - return map; + return; } - map->set_transitions(transitions); - return transitions; + map->set_transitions(*transitions); } @@ -4765,25 +4766,11 @@ uint32_t Map::bit_field3() { } -void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) { - Object* back_pointer = GetBackPointer(); - - if (Heap::ShouldZapGarbage() && HasTransitionArray()) { - ZapTransitions(); - } - - WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer); - CONDITIONAL_WRITE_BARRIER( - heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode); -} - - -void Map::AppendDescriptor(Descriptor* desc, - const DescriptorArray::WhitenessWitness& witness) { +void Map::AppendDescriptor(Descriptor* desc) { DescriptorArray* descriptors = instance_descriptors(); int number_of_own_descriptors = NumberOfOwnDescriptors(); ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors); - descriptors->Append(desc, witness); + descriptors->Append(desc); SetNumberOfOwnDescriptors(number_of_own_descriptors + 1); } @@ -4824,33 +4811,14 @@ bool Map::CanHaveMoreTransitions() { } -MaybeObject* Map::AddTransition(Name* key, - Map* target, - SimpleTransitionFlag flag) { - if (HasTransitionArray()) return transitions()->CopyInsert(key, target); - return TransitionArray::NewWith(flag, key, target, GetBackPointer()); -} - - -void Map::SetTransition(int transition_index, Map* target) { - transitions()->SetTarget(transition_index, target); -} - - Map* Map::GetTransition(int transition_index) { return transitions()->GetTarget(transition_index); } -MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) { - TransitionArray* transitions; - MaybeObject* maybe_transitions = AddTransition( - GetHeap()->elements_transition_symbol(), - transitioned_map, - FULL_TRANSITION); - if (!maybe_transitions->To(&transitions)) return maybe_transitions; - set_transitions(transitions); - return transitions; +int Map::SearchTransition(Name* name) { + if (HasTransitionArray()) return transitions()->Search(name); + return TransitionArray::kNotFound; } @@ -4863,19 +4831,18 @@ FixedArray* Map::GetPrototypeTransitions() { } -MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) { - MaybeObject* allow_prototype = EnsureHasTransitionArray(this); - if (allow_prototype->IsFailure()) return allow_prototype; - int old_number_of_transitions = NumberOfProtoTransitions(); +void Map::SetPrototypeTransitions( + Handle<Map> map, Handle<FixedArray> proto_transitions) { + EnsureHasTransitionArray(map); + int old_number_of_transitions = map->NumberOfProtoTransitions(); #ifdef DEBUG - if (HasPrototypeTransitions()) { - ASSERT(GetPrototypeTransitions() != proto_transitions); - ZapPrototypeTransitions(); + if (map->HasPrototypeTransitions()) { + 
ASSERT(map->GetPrototypeTransitions() != *proto_transitions); + map->ZapPrototypeTransitions(); } #endif - transitions()->SetPrototypeTransitions(proto_transitions); - SetNumberOfProtoTransitions(old_number_of_transitions); - return this; + map->transitions()->SetPrototypeTransitions(*proto_transitions); + map->SetNumberOfProtoTransitions(old_number_of_transitions); } @@ -4940,23 +4907,6 @@ void Map::SetBackPointer(Object* value, WriteBarrierMode mode) { } -// Can either be Smi (no transitions), normal transition array, or a transition -// array with the header overwritten as a Smi (thus iterating). -TransitionArray* Map::unchecked_transition_array() { - Object* object = *HeapObject::RawField(this, - Map::kTransitionsOrBackPointerOffset); - TransitionArray* transition_array = static_cast<TransitionArray*>(object); - return transition_array; -} - - -HeapObject* Map::UncheckedPrototypeTransitions() { - ASSERT(HasTransitionArray()); - ASSERT(unchecked_transition_array()->HasPrototypeTransitions()); - return unchecked_transition_array()->UncheckedPrototypeTransitions(); -} - - ACCESSORS(Map, code_cache, Object, kCodeCacheOffset) ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset) ACCESSORS(Map, constructor, Object, kConstructorOffset) @@ -4971,6 +4921,7 @@ ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset) ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset) ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset) +ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset) ACCESSORS(AccessorInfo, name, Object, kNameOffset) ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset) @@ -5082,7 +5033,6 @@ void Script::set_compilation_state(CompilationState state) { } -#ifdef ENABLE_DEBUGGER_SUPPORT ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex) ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex) ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex) @@ -5092,12 +5042,13 @@ ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex) ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex) ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex) ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex) -#endif ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset) ACCESSORS(SharedFunctionInfo, optimized_code_map, Object, kOptimizedCodeMapOffset) ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset) +ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray, + kFeedbackVectorOffset) ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset) ACCESSORS(SharedFunctionInfo, instance_class_name, Object, kInstanceClassNameOffset) @@ -5105,7 +5056,6 @@ ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset) ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset) ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset) ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset) -SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset) SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset) @@ -5160,6 +5110,8 @@ SMI_ACCESSORS(SharedFunctionInfo, compiler_hints, SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason, kOptCountAndBailoutReasonOffset) SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset) +SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset) +SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset) 
#else @@ -5210,9 +5162,15 @@ PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, opt_count_and_bailout_reason, kOptCountAndBailoutReasonOffset) - PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset) +PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, + ast_node_count, + kAstNodeCountOffset) +PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, + profiler_ticks, + kProfilerTicksOffset) + #endif @@ -5256,12 +5214,6 @@ void SharedFunctionInfo::set_optimization_disabled(bool disable) { } -int SharedFunctionInfo::profiler_ticks() { - if (code()->kind() != Code::FUNCTION) return 0; - return code()->profiler_ticks(); -} - - StrictMode SharedFunctionInfo::strict_mode() { return BooleanBit::get(compiler_hints(), kStrictModeFunction) ? STRICT : SLOPPY; @@ -5355,6 +5307,7 @@ void SharedFunctionInfo::ReplaceCode(Code* value) { } ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL); + set_code(value); } @@ -5712,6 +5665,32 @@ void JSProxy::InitializeBody(int object_size, Object* value) { ACCESSORS(JSSet, table, Object, kTableOffset) ACCESSORS(JSMap, table, Object, kTableOffset) + + +#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \ + template<class Derived, class TableType> \ + type* OrderedHashTableIterator<Derived, TableType>::name() { \ + return type::cast(READ_FIELD(this, offset)); \ + } \ + template<class Derived, class TableType> \ + void OrderedHashTableIterator<Derived, TableType>::set_##name( \ + type* value, WriteBarrierMode mode) { \ + WRITE_FIELD(this, offset, value); \ + CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \ + } + +ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset) +ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Smi, kIndexOffset) +ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(count, Smi, kCountOffset) +ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Smi, kKindOffset) +ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(next_iterator, Object, + kNextIteratorOffset) +ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(previous_iterator, Object, + kPreviousIteratorOffset) + +#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS + + ACCESSORS(JSWeakCollection, table, Object, kTableOffset) ACCESSORS(JSWeakCollection, next, Object, kNextOffset) @@ -5733,6 +5712,11 @@ SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset) ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset) SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset) +bool JSGeneratorObject::is_suspended() { + ASSERT_LT(kGeneratorExecuting, kGeneratorClosed); + ASSERT_EQ(kGeneratorClosed, 0); + return continuation() > 0; +} JSGeneratorObject* JSGeneratorObject::cast(Object* obj) { ASSERT(obj->IsJSGeneratorObject()); @@ -5832,7 +5816,7 @@ void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) { int Code::stub_info() { ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC || - kind() == BINARY_OP_IC || kind() == LOAD_IC); + kind() == BINARY_OP_IC || kind() == LOAD_IC || kind() == CALL_IC); return Smi::cast(raw_type_feedback_info())->value(); } @@ -5843,6 +5827,7 @@ void Code::set_stub_info(int value) { kind() == BINARY_OP_IC || kind() == STUB || kind() == LOAD_IC || + kind() == CALL_IC || kind() == KEYED_LOAD_IC || kind() == STORE_IC || kind() == KEYED_STORE_IC); @@ -6121,24 +6106,6 @@ bool JSObject::HasIndexedInterceptor() { } -MaybeObject* JSObject::EnsureWritableFastElements() { - ASSERT(HasFastSmiOrObjectElements()); - FixedArray* elems = FixedArray::cast(elements()); - Isolate* isolate 
= GetIsolate(); - if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems; - Object* writable_elems; - { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap( - elems, isolate->heap()->fixed_array_map()); - if (!maybe_writable_elems->ToObject(&writable_elems)) { - return maybe_writable_elems; - } - } - set_elements(FixedArray::cast(writable_elems)); - isolate->counters()->cow_arrays_converted()->Increment(); - return writable_elems; -} - - NameDictionary* JSObject::property_dictionary() { ASSERT(!HasFastProperties()); return NameDictionary::cast(properties()); @@ -6151,6 +6118,20 @@ SeededNumberDictionary* JSObject::element_dictionary() { } +Handle<JSSetIterator> JSSetIterator::Create( + Handle<OrderedHashSet> table, + int kind) { + return CreateInternal(table->GetIsolate()->set_iterator_map(), table, kind); +} + + +Handle<JSMapIterator> JSMapIterator::Create( + Handle<OrderedHashMap> table, + int kind) { + return CreateInternal(table->GetIsolate()->map_iterator_map(), table, kind); +} + + bool Name::IsHashFieldComputed(uint32_t field) { return (field & kHashNotComputedMask) == 0; } @@ -6461,27 +6442,27 @@ bool AccessorPair::prohibits_overwriting() { } -template<typename Shape, typename Key> -void Dictionary<Shape, Key>::SetEntry(int entry, - Object* key, - Object* value) { +template<typename Derived, typename Shape, typename Key> +void Dictionary<Derived, Shape, Key>::SetEntry(int entry, + Handle<Object> key, + Handle<Object> value) { SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0))); } -template<typename Shape, typename Key> -void Dictionary<Shape, Key>::SetEntry(int entry, - Object* key, - Object* value, - PropertyDetails details) { +template<typename Derived, typename Shape, typename Key> +void Dictionary<Derived, Shape, Key>::SetEntry(int entry, + Handle<Object> key, + Handle<Object> value, + PropertyDetails details) { ASSERT(!key->IsName() || details.IsDeleted() || details.dictionary_index() > 0); - int index = HashTable<Shape, Key>::EntryToIndex(entry); + int index = DerivedHashTable::EntryToIndex(entry); DisallowHeapAllocation no_gc; WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc); - FixedArray::set(index, key, mode); - FixedArray::set(index+1, value, mode); + FixedArray::set(index, *key, mode); + FixedArray::set(index+1, *value, mode); FixedArray::set(index+2, details.AsSmi()); } @@ -6503,10 +6484,12 @@ uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key, return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0); } + uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) { return ComputeIntegerHash(key, seed); } + uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key, uint32_t seed, Object* other) { @@ -6514,12 +6497,13 @@ uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key, return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed); } -MaybeObject* NumberDictionaryShape::AsObject(Heap* heap, uint32_t key) { - return heap->NumberFromUint32(key); + +Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) { + return isolate->factory()->NewNumberFromUint(key); } -bool NameDictionaryShape::IsMatch(Name* key, Object* other) { +bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) { // We know that all entries in a hash table had their hash keys created. // Use that knowledge to have fast failure. 
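The *DictionaryShape and *HashTableShape classes above and below are small policy types: the generic table never hashes or compares keys itself, it delegates to the shape's Hash, HashForObject and IsMatch hooks, which is what lets number, name and object tables share one implementation. A reduced sketch of that split, with invented shape and table names and a linear scan standing in for the probing shown earlier:

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

// A "shape" knows how to hash a lookup key, hash a stored entry, and compare
// the two; the container is written once against this interface.
struct StringKeyShape {
  typedef std::string Key;
  typedef std::string Stored;

  static uint32_t Hash(const Key& key) { return Fnv(key); }
  static uint32_t HashForObject(const Key&, const Stored& stored) { return Fnv(stored); }
  static bool IsMatch(const Key& key, const Stored& stored) {
    // Cheap rejection on the hash before the potentially slower comparison.
    return Hash(key) == Fnv(stored) && key == stored;
  }

 private:
  static uint32_t Fnv(const std::string& s) {
    uint32_t h = 2166136261u;
    for (char c : s) { h ^= static_cast<uint8_t>(c); h *= 16777619u; }
    return h;
  }
};

template <class Shape>
class ShapedTable {
 public:
  void Add(const typename Shape::Stored& entry) { entries_.push_back(entry); }

  // Linear scan for clarity; the real tables combine this policy with the
  // open-addressing probing sketched earlier.
  int FindEntry(const typename Shape::Key& key) const {
    for (std::size_t i = 0; i < entries_.size(); ++i) {
      if (Shape::IsMatch(key, entries_[i])) return static_cast<int>(i);
    }
    return -1;
  }

 private:
  std::vector<typename Shape::Stored> entries_;
};

// Usage: ShapedTable<StringKeyShape> table; table.Add("length"); table.FindEntry("length");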
if (key->Hash() != Name::cast(other)->Hash()) return false; @@ -6527,63 +6511,72 @@ bool NameDictionaryShape::IsMatch(Name* key, Object* other) { } -uint32_t NameDictionaryShape::Hash(Name* key) { +uint32_t NameDictionaryShape::Hash(Handle<Name> key) { return key->Hash(); } -uint32_t NameDictionaryShape::HashForObject(Name* key, Object* other) { +uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) { return Name::cast(other)->Hash(); } -MaybeObject* NameDictionaryShape::AsObject(Heap* heap, Name* key) { +Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate, + Handle<Name> key) { ASSERT(key->IsUniqueName()); return key; } -template <int entrysize> -bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) { +void NameDictionary::DoGenerateNewEnumerationIndices( + Handle<NameDictionary> dictionary) { + DerivedDictionary::GenerateNewEnumerationIndices(dictionary); +} + + +bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) { return key->SameValue(other); } -template <int entrysize> -uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) { +uint32_t ObjectHashTableShape::Hash(Handle<Object> key) { return Smi::cast(key->GetHash())->value(); } -template <int entrysize> -uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key, - Object* other) { +uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key, + Object* other) { return Smi::cast(other->GetHash())->value(); } -template <int entrysize> -MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Heap* heap, - Object* key) { +Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate, + Handle<Object> key) { return key; } +Handle<ObjectHashTable> ObjectHashTable::Shrink( + Handle<ObjectHashTable> table, Handle<Object> key) { + return DerivedHashTable::Shrink(table, key); +} + + template <int entrysize> -bool WeakHashTableShape<entrysize>::IsMatch(Object* key, Object* other) { +bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) { return key->SameValue(other); } template <int entrysize> -uint32_t WeakHashTableShape<entrysize>::Hash(Object* key) { - intptr_t hash = reinterpret_cast<intptr_t>(key); +uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) { + intptr_t hash = reinterpret_cast<intptr_t>(*key); return (uint32_t)(hash & 0xFFFFFFFF); } template <int entrysize> -uint32_t WeakHashTableShape<entrysize>::HashForObject(Object* key, +uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key, Object* other) { intptr_t hash = reinterpret_cast<intptr_t>(other); return (uint32_t)(hash & 0xFFFFFFFF); @@ -6591,8 +6584,8 @@ uint32_t WeakHashTableShape<entrysize>::HashForObject(Object* key, template <int entrysize> -MaybeObject* WeakHashTableShape<entrysize>::AsObject(Heap* heap, - Object* key) { +Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate, + Handle<Object> key) { return key; } @@ -6654,24 +6647,6 @@ void JSArray::SetContent(Handle<JSArray> array, } -MaybeObject* FixedArray::Copy() { - if (length() == 0) return this; - return GetHeap()->CopyFixedArray(this); -} - - -MaybeObject* FixedDoubleArray::Copy() { - if (length() == 0) return this; - return GetHeap()->CopyFixedDoubleArray(this); -} - - -MaybeObject* ConstantPoolArray::Copy() { - if (length() == 0) return this; - return GetHeap()->CopyConstantPoolArray(this); -} - - Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) { return isolate->factory()->uninitialized_symbol(); } @@ -6771,10 +6746,6 @@ bool 
TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) { } -ACCESSORS(TypeFeedbackInfo, feedback_vector, FixedArray, - kFeedbackVectorOffset) - - SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot) @@ -6863,11 +6834,15 @@ void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj, #undef ACCESSORS #undef ACCESSORS_TO_SMI #undef SMI_ACCESSORS +#undef SYNCHRONIZED_SMI_ACCESSORS +#undef NOBARRIER_SMI_ACCESSORS #undef BOOL_GETTER #undef BOOL_ACCESSORS #undef FIELD_ADDR #undef READ_FIELD +#undef NOBARRIER_READ_FIELD #undef WRITE_FIELD +#undef NOBARRIER_WRITE_FIELD #undef WRITE_BARRIER #undef CONDITIONAL_WRITE_BARRIER #undef READ_DOUBLE_FIELD @@ -6882,6 +6857,8 @@ void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj, #undef WRITE_SHORT_FIELD #undef READ_BYTE_FIELD #undef WRITE_BYTE_FIELD +#undef NOBARRIER_READ_BYTE_FIELD +#undef NOBARRIER_WRITE_BYTE_FIELD } } // namespace v8::internal diff --git a/deps/v8/src/objects-printer.cc b/deps/v8/src/objects-printer.cc index 518167cc5..4fb5b5676 100644 --- a/deps/v8/src/objects-printer.cc +++ b/deps/v8/src/objects-printer.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
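The #undef block that closes objects-inl.h above is there because the ACCESSORS/SMI_ACCESSORS family are plain textual macros: each use stamps out a typed getter/setter pair over a raw field offset, and undefining them keeps the names from leaking out of the header. A much-reduced, hypothetical version of that macro pattern (no write barriers or tagged values):

#include <cstring>

#define FIELD_ADDR_EX(obj, offset) (reinterpret_cast<char*>(obj) + (offset))

// Expands to a getter and a setter that read/write an int at a byte offset.
#define INT_ACCESSORS_EX(holder, name, offset)                                   \
  int holder::name() const {                                                     \
    int value;                                                                    \
    std::memcpy(&value, FIELD_ADDR_EX(const_cast<holder*>(this), offset),         \
                sizeof(value));                                                    \
    return value;                                                                  \
  }                                                                                \
  void holder::set_##name(int value) {                                             \
    std::memcpy(FIELD_ADDR_EX(this, offset), &value, sizeof(value));               \
  }

struct Box {
  static const int kSizeOffset = 0;
  static const int kCountOffset = 4;
  char raw[8];

  int size() const;
  void set_size(int value);
  int count() const;
  void set_count(int value);
};

INT_ACCESSORS_EX(Box, size, Box::kSizeOffset)
INT_ACCESSORS_EX(Box, count, Box::kCountOffset)

// Macros are #undef'd once all accessor definitions have been emitted, so the
// names cannot leak into files that include this header.
#undef INT_ACCESSORS_EX
#undef FIELD_ADDR_EX

// Usage: Box b{}; b.set_size(16); int s = b.size();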
#include "v8.h" @@ -37,32 +14,27 @@ namespace internal { #ifdef OBJECT_PRINT -void MaybeObject::Print() { +void Object::Print() { Print(stdout); } -void MaybeObject::Print(FILE* out) { - Object* this_as_object; - if (ToObject(&this_as_object)) { - if (this_as_object->IsSmi()) { - Smi::cast(this_as_object)->SmiPrint(out); - } else { - HeapObject::cast(this_as_object)->HeapObjectPrint(out); - } +void Object::Print(FILE* out) { + if (IsSmi()) { + Smi::cast(this)->SmiPrint(out); } else { - Failure::cast(this)->FailurePrint(out); + HeapObject::cast(this)->HeapObjectPrint(out); } Flush(out); } -void MaybeObject::PrintLn() { +void Object::PrintLn() { PrintLn(stdout); } -void MaybeObject::PrintLn(FILE* out) { +void Object::PrintLn(FILE* out) { Print(out); PrintF(out, "\n"); } @@ -174,6 +146,12 @@ void HeapObject::HeapObjectPrint(FILE* out) { case JS_MAP_TYPE: JSMap::cast(this)->JSMapPrint(out); break; + case JS_SET_ITERATOR_TYPE: + JSSetIterator::cast(this)->JSSetIteratorPrint(out); + break; + case JS_MAP_ITERATOR_TYPE: + JSMapIterator::cast(this)->JSMapIteratorPrint(out); + break; case JS_WEAK_MAP_TYPE: JSWeakMap::cast(this)->JSWeakMapPrint(out); break; @@ -271,7 +249,6 @@ void JSObject::PrintProperties(FILE* out) { case HANDLER: // only in lookup results, not in descriptors case INTERCEPTOR: // only in lookup results, not in descriptors // There are no transitions in the descriptor array. - case TRANSITION: case NONEXISTENT: UNREACHABLE(); break; @@ -428,7 +405,6 @@ void JSObject::PrintTransitions(FILE* out) { case NORMAL: case HANDLER: case INTERCEPTOR: - case TRANSITION: case NONEXISTENT: UNREACHABLE(); break; @@ -566,8 +542,6 @@ void TypeFeedbackInfo::TypeFeedbackInfoPrint(FILE* out) { HeapObject::PrintHeader(out, "TypeFeedbackInfo"); PrintF(out, " - ic_total_count: %d, ic_with_type_info_count: %d\n", ic_total_count(), ic_with_type_info_count()); - PrintF(out, " - feedback_vector: "); - feedback_vector()->FixedArrayPrint(out); } @@ -724,7 +698,7 @@ void JSProxy::JSProxyPrint(FILE* out) { PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map())); PrintF(out, " - handler = "); handler()->Print(out); - PrintF(out, " - hash = "); + PrintF(out, "\n - hash = "); hash()->Print(out); PrintF(out, "\n"); } @@ -735,9 +709,9 @@ void JSFunctionProxy::JSFunctionProxyPrint(FILE* out) { PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map())); PrintF(out, " - handler = "); handler()->Print(out); - PrintF(out, " - call_trap = "); + PrintF(out, "\n - call_trap = "); call_trap()->Print(out); - PrintF(out, " - construct_trap = "); + PrintF(out, "\n - construct_trap = "); construct_trap()->Print(out); PrintF(out, "\n"); } @@ -761,6 +735,48 @@ void JSMap::JSMapPrint(FILE* out) { } +template<class Derived, class TableType> +void OrderedHashTableIterator<Derived, TableType>:: + OrderedHashTableIteratorPrint(FILE* out) { + PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map())); + PrintF(out, " - table = "); + table()->ShortPrint(out); + PrintF(out, "\n - index = "); + index()->ShortPrint(out); + PrintF(out, "\n - count = "); + count()->ShortPrint(out); + PrintF(out, "\n - kind = "); + kind()->ShortPrint(out); + PrintF(out, "\n - next_iterator = "); + next_iterator()->ShortPrint(out); + PrintF(out, "\n - previous_iterator = "); + previous_iterator()->ShortPrint(out); + PrintF(out, "\n"); +} + + +template void +OrderedHashTableIterator<JSSetIterator, + OrderedHashSet>::OrderedHashTableIteratorPrint(FILE* out); + + +template void +OrderedHashTableIterator<JSMapIterator, + 
OrderedHashMap>::OrderedHashTableIteratorPrint(FILE* out); + + +void JSSetIterator::JSSetIteratorPrint(FILE* out) { + HeapObject::PrintHeader(out, "JSSetIterator"); + OrderedHashTableIteratorPrint(out); +} + + +void JSMapIterator::JSMapIteratorPrint(FILE* out) { + HeapObject::PrintHeader(out, "JSMapIterator"); + OrderedHashTableIteratorPrint(out); +} + + void JSWeakMap::JSWeakMapPrint(FILE* out) { HeapObject::PrintHeader(out, "JSWeakMap"); PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map())); @@ -854,6 +870,7 @@ void SharedFunctionInfo::SharedFunctionInfoPrint(FILE* out) { PrintF(out, " - name: "); name()->ShortPrint(out); PrintF(out, "\n - expected_nof_properties: %d", expected_nof_properties()); + PrintF(out, "\n - ast_node_count: %d", ast_node_count()); PrintF(out, "\n - instance class name = "); instance_class_name()->Print(out); PrintF(out, "\n - code = "); @@ -881,6 +898,8 @@ void SharedFunctionInfo::SharedFunctionInfoPrint(FILE* out) { PrintF(out, "\n - length = %d", length()); PrintF(out, "\n - optimized_code_map = "); optimized_code_map()->ShortPrint(out); + PrintF(out, "\n - feedback_vector = "); + feedback_vector()->FixedArrayPrint(out); PrintF(out, "\n"); } @@ -1165,7 +1184,6 @@ void Script::ScriptPrint(FILE* out) { } -#ifdef ENABLE_DEBUGGER_SUPPORT void DebugInfo::DebugInfoPrint(FILE* out) { HeapObject::PrintHeader(out, "DebugInfo"); PrintF(out, "\n - shared: "); @@ -1187,7 +1205,6 @@ void BreakPointInfo::BreakPointInfoPrint(FILE* out) { PrintF(out, "\n - break_point_objects: "); break_point_objects()->ShortPrint(out); } -#endif // ENABLE_DEBUGGER_SUPPORT void DescriptorArray::PrintDescriptors(FILE* out) { @@ -1223,7 +1240,6 @@ void TransitionArray::PrintTransitions(FILE* out) { case NORMAL: case HANDLER: case INTERCEPTOR: - case TRANSITION: case NONEXISTENT: UNREACHABLE(); break; diff --git a/deps/v8/src/objects-visiting-inl.h b/deps/v8/src/objects-visiting-inl.h index 31117bb94..bb2e99243 100644 --- a/deps/v8/src/objects-visiting-inl.h +++ b/deps/v8/src/objects-visiting-inl.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_OBJECTS_VISITING_INL_H_ #define V8_OBJECTS_VISITING_INL_H_ @@ -309,12 +286,14 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget( // Monomorphic ICs are preserved when possible, but need to be flushed // when they might be keeping a Context alive, or when the heap is about // to be serialized. + if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC || target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() || - Serializer::enabled() || target->ic_age() != heap->global_ic_age())) { - IC::Clear(target->GetIsolate(), rinfo->pc(), - rinfo->host()->constant_pool()); + Serializer::enabled(heap->isolate()) || + target->ic_age() != heap->global_ic_age() || + target->is_invalidated_weak_stub())) { + IC::Clear(heap->isolate(), rinfo->pc(), rinfo->host()->constant_pool()); target = Code::GetCodeFromTargetAddress(rinfo->target_address()); } heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); @@ -427,10 +406,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCode( Map* map, HeapObject* object) { Heap* heap = map->GetHeap(); Code* code = Code::cast(object); - if (FLAG_cleanup_code_caches_at_gc) { - code->ClearTypeFeedbackInfo(heap); - } - if (FLAG_age_code && !Serializer::enabled()) { + if (FLAG_age_code && !Serializer::enabled(heap->isolate())) { code->MakeOlder(heap->mark_compact_collector()->marking_parity()); } code->CodeIterateBody<StaticVisitor>(heap); @@ -445,6 +421,9 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( if (shared->ic_age() != heap->global_ic_age()) { shared->ResetForNewContext(heap->global_ic_age()); } + if (FLAG_cleanup_code_caches_at_gc) { + shared->ClearTypeFeedbackInfo(); + } if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache && !shared->optimized_code_map()->IsSmi()) { @@ -498,8 +477,19 @@ void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray( } for (int i = 0; i < constant_pool->count_of_heap_ptr_entries(); i++) { int index = constant_pool->first_heap_ptr_index() + i; - StaticVisitor::VisitPointer(heap, - constant_pool->RawFieldOfElementAt(index)); + Object** slot = constant_pool->RawFieldOfElementAt(index); + HeapObject* object = HeapObject::cast(*slot); + heap->mark_compact_collector()->RecordSlot(slot, slot, object); + bool is_weak_object = + (constant_pool->get_weak_object_state() == + ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE && + Code::IsWeakObjectInOptimizedCode(object)) || + (constant_pool->get_weak_object_state() == + ConstantPoolArray::WEAK_OBJECTS_IN_IC && + Code::IsWeakObjectInIC(object)); + if (!is_weak_object) { + StaticVisitor::MarkObject(heap, object); + } } } @@ -615,12 +605,9 @@ void StaticMarkingVisitor<StaticVisitor>::MarkMapContents( // array to prevent visiting it later. 
Skip recording the transition // array slot, since it will be implicitly recorded when the pointer // fields of this map are visited. - TransitionArray* transitions = map->unchecked_transition_array(); - if (transitions->IsTransitionArray()) { + if (map->HasTransitionArray()) { + TransitionArray* transitions = map->transitions(); MarkTransitionArray(heap, transitions); - } else { - // Already marked by marking map->GetBackPointer() above. - ASSERT(transitions->IsMap() || transitions->IsUndefined()); } // Since descriptor arrays are potentially shared, ensure that only the diff --git a/deps/v8/src/objects-visiting.cc b/deps/v8/src/objects-visiting.cc index 16c51676b..24cff3487 100644 --- a/deps/v8/src/objects-visiting.cc +++ b/deps/v8/src/objects-visiting.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -163,6 +140,8 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId( case JS_GLOBAL_OBJECT_TYPE: case JS_BUILTINS_OBJECT_TYPE: case JS_MESSAGE_OBJECT_TYPE: + case JS_SET_ITERATOR_TYPE: + case JS_MAP_ITERATOR_TYPE: return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric, instance_size); @@ -211,4 +190,281 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId( } } + +template <class T> +struct WeakListVisitor; + + +template <class T> +Object* VisitWeakList(Heap* heap, + Object* list, + WeakObjectRetainer* retainer, + bool record_slots) { + Object* undefined = heap->undefined_value(); + Object* head = undefined; + T* tail = NULL; + MarkCompactCollector* collector = heap->mark_compact_collector(); + while (list != undefined) { + // Check whether to keep the candidate in the list. + T* candidate = reinterpret_cast<T*>(list); + Object* retained = retainer->RetainAs(list); + if (retained != NULL) { + if (head == undefined) { + // First element in the list. + head = retained; + } else { + // Subsequent elements in the list. 
+ ASSERT(tail != NULL); + WeakListVisitor<T>::SetWeakNext(tail, retained); + if (record_slots) { + Object** next_slot = + HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset()); + collector->RecordSlot(next_slot, next_slot, retained); + } + } + // Retained object is new tail. + ASSERT(!retained->IsUndefined()); + candidate = reinterpret_cast<T*>(retained); + tail = candidate; + + + // tail is a live object, visit it. + WeakListVisitor<T>::VisitLiveObject( + heap, tail, retainer, record_slots); + } else { + WeakListVisitor<T>::VisitPhantomObject(heap, candidate); + } + + // Move to next element in the list. + list = WeakListVisitor<T>::WeakNext(candidate); + } + + // Terminate the list if there is one or more elements. + if (tail != NULL) { + WeakListVisitor<T>::SetWeakNext(tail, undefined); + } + return head; +} + + +template <class T> +static void ClearWeakList(Heap* heap, + Object* list) { + Object* undefined = heap->undefined_value(); + while (list != undefined) { + T* candidate = reinterpret_cast<T*>(list); + list = WeakListVisitor<T>::WeakNext(candidate); + WeakListVisitor<T>::SetWeakNext(candidate, undefined); + } +} + + +template<> +struct WeakListVisitor<JSFunction> { + static void SetWeakNext(JSFunction* function, Object* next) { + function->set_next_function_link(next); + } + + static Object* WeakNext(JSFunction* function) { + return function->next_function_link(); + } + + static int WeakNextOffset() { + return JSFunction::kNextFunctionLinkOffset; + } + + static void VisitLiveObject(Heap*, JSFunction*, + WeakObjectRetainer*, bool) { + } + + static void VisitPhantomObject(Heap*, JSFunction*) { + } +}; + + +template<> +struct WeakListVisitor<Code> { + static void SetWeakNext(Code* code, Object* next) { + code->set_next_code_link(next); + } + + static Object* WeakNext(Code* code) { + return code->next_code_link(); + } + + static int WeakNextOffset() { + return Code::kNextCodeLinkOffset; + } + + static void VisitLiveObject(Heap*, Code*, + WeakObjectRetainer*, bool) { + } + + static void VisitPhantomObject(Heap*, Code*) { + } +}; + + +template<> +struct WeakListVisitor<Context> { + static void SetWeakNext(Context* context, Object* next) { + context->set(Context::NEXT_CONTEXT_LINK, + next, + UPDATE_WRITE_BARRIER); + } + + static Object* WeakNext(Context* context) { + return context->get(Context::NEXT_CONTEXT_LINK); + } + + static void VisitLiveObject(Heap* heap, + Context* context, + WeakObjectRetainer* retainer, + bool record_slots) { + // Process the three weak lists linked off the context. + DoWeakList<JSFunction>(heap, context, retainer, record_slots, + Context::OPTIMIZED_FUNCTIONS_LIST); + DoWeakList<Code>(heap, context, retainer, record_slots, + Context::OPTIMIZED_CODE_LIST); + DoWeakList<Code>(heap, context, retainer, record_slots, + Context::DEOPTIMIZED_CODE_LIST); + } + + template<class T> + static void DoWeakList(Heap* heap, + Context* context, + WeakObjectRetainer* retainer, + bool record_slots, + int index) { + // Visit the weak list, removing dead intermediate elements. + Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer, + record_slots); + + // Update the list head. + context->set(index, list_head, UPDATE_WRITE_BARRIER); + + if (record_slots) { + // Record the updated slot if necessary. 
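Editorial note: VisitWeakList above is the generic engine behind all of the WeakListVisitor specializations; it walks a singly linked list whose next pointers are weak, asks the retainer whether each element survives, relinks the survivors, and finally terminates the list. The following is a minimal standalone model of that loop, assuming a plain Node type and a std::function retainer instead of V8's Object/WeakObjectRetainer machinery; slot recording and the live/phantom callbacks are omitted.

#include <functional>

// Invented node type: the payload is irrelevant, only the weak next pointer
// matters for the pruning loop.
struct Node {
  int payload = 0;
  Node* weak_next = nullptr;
};

// Walk the list, keep only the nodes the retainer returns, relink survivors,
// and terminate the list. Returns the new head (or nullptr if nothing lives).
Node* PruneWeakList(Node* list, const std::function<Node*(Node*)>& retain) {
  Node* head = nullptr;
  Node* tail = nullptr;
  while (list != nullptr) {
    Node* candidate = list;
    list = candidate->weak_next;          // WeakNext(candidate)
    Node* retained = retain(candidate);
    if (retained == nullptr) continue;    // dead: VisitPhantomObject would run
    if (head == nullptr) {
      head = retained;                    // first live element becomes the head
    } else {
      tail->weak_next = retained;         // SetWeakNext(tail, retained)
    }
    tail = retained;                      // the live element is the new tail
  }
  if (tail != nullptr) tail->weak_next = nullptr;  // terminate the list
  return head;
}

In the real code the terminator is the undefined value rather than nullptr, and when record_slots is set every surviving next-pointer slot is also reported to the mark-compact collector.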
+ Object** head_slot = HeapObject::RawField( + context, FixedArray::SizeFor(index)); + heap->mark_compact_collector()->RecordSlot( + head_slot, head_slot, list_head); + } + } + + static void VisitPhantomObject(Heap* heap, Context* context) { + ClearWeakList<JSFunction>(heap, + context->get(Context::OPTIMIZED_FUNCTIONS_LIST)); + ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST)); + ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST)); + } + + static int WeakNextOffset() { + return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK); + } +}; + + +template<> +struct WeakListVisitor<JSArrayBufferView> { + static void SetWeakNext(JSArrayBufferView* obj, Object* next) { + obj->set_weak_next(next); + } + + static Object* WeakNext(JSArrayBufferView* obj) { + return obj->weak_next(); + } + + static void VisitLiveObject(Heap*, + JSArrayBufferView* obj, + WeakObjectRetainer* retainer, + bool record_slots) {} + + static void VisitPhantomObject(Heap*, JSArrayBufferView*) {} + + static int WeakNextOffset() { + return JSArrayBufferView::kWeakNextOffset; + } +}; + + +template<> +struct WeakListVisitor<JSArrayBuffer> { + static void SetWeakNext(JSArrayBuffer* obj, Object* next) { + obj->set_weak_next(next); + } + + static Object* WeakNext(JSArrayBuffer* obj) { + return obj->weak_next(); + } + + static void VisitLiveObject(Heap* heap, + JSArrayBuffer* array_buffer, + WeakObjectRetainer* retainer, + bool record_slots) { + Object* typed_array_obj = + VisitWeakList<JSArrayBufferView>( + heap, + array_buffer->weak_first_view(), + retainer, record_slots); + array_buffer->set_weak_first_view(typed_array_obj); + if (typed_array_obj != heap->undefined_value() && record_slots) { + Object** slot = HeapObject::RawField( + array_buffer, JSArrayBuffer::kWeakFirstViewOffset); + heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj); + } + } + + static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) { + Runtime::FreeArrayBuffer(heap->isolate(), phantom); + } + + static int WeakNextOffset() { + return JSArrayBuffer::kWeakNextOffset; + } +}; + + +template<> +struct WeakListVisitor<AllocationSite> { + static void SetWeakNext(AllocationSite* obj, Object* next) { + obj->set_weak_next(next); + } + + static Object* WeakNext(AllocationSite* obj) { + return obj->weak_next(); + } + + static void VisitLiveObject(Heap* heap, + AllocationSite* site, + WeakObjectRetainer* retainer, + bool record_slots) {} + + static void VisitPhantomObject(Heap* heap, AllocationSite* phantom) {} + + static int WeakNextOffset() { + return AllocationSite::kWeakNextOffset; + } +}; + + +template Object* VisitWeakList<Code>( + Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots); + + +template Object* VisitWeakList<JSFunction>( + Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots); + + +template Object* VisitWeakList<Context>( + Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots); + + +template Object* VisitWeakList<JSArrayBuffer>( + Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots); + + +template Object* VisitWeakList<AllocationSite>( + Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots); + } } // namespace v8::internal diff --git a/deps/v8/src/objects-visiting.h b/deps/v8/src/objects-visiting.h index de8ca6d05..05f82574c 100644 --- a/deps/v8/src/objects-visiting.h +++ b/deps/v8/src/objects-visiting.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_OBJECTS_VISITING_H_ #define V8_OBJECTS_VISITING_H_ @@ -483,6 +460,20 @@ VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback> StaticMarkingVisitor<StaticVisitor>::table_; +class WeakObjectRetainer; + + +// A weak list is single linked list where each element has a weak pointer to +// the next element. Given the head of the list, this function removes dead +// elements from the list and if requested records slots for next-element +// pointers. The template parameter T is a WeakListVisitor that defines how to +// access the next-element pointers. +template <class T> +Object* VisitWeakList(Heap* heap, + Object* list, + WeakObjectRetainer* retainer, + bool record_slots); + } } // namespace v8::internal #endif // V8_OBJECTS_VISITING_H_ diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc index 45220ee29..956e46f73 100644 --- a/deps/v8/src/objects.cc +++ b/deps/v8/src/objects.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -49,6 +26,7 @@ #include "macro-assembler.h" #include "mark-compact.h" #include "safepoint-table.h" +#include "string-search.h" #include "string-stream.h" #include "utils.h" @@ -60,53 +38,43 @@ namespace v8 { namespace internal { - -MUST_USE_RESULT static MaybeObject* CreateJSValue(JSFunction* constructor, - Object* value) { - Object* result; - { MaybeObject* maybe_result = - constructor->GetHeap()->AllocateJSObject(constructor); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - JSValue::cast(result)->set_value(value); - return result; -} - - -MaybeObject* Object::ToObject(Context* native_context) { - if (IsNumber()) { - return CreateJSValue(native_context->number_function(), this); - } else if (IsBoolean()) { - return CreateJSValue(native_context->boolean_function(), this); - } else if (IsString()) { - return CreateJSValue(native_context->string_function(), this); - } else if (IsSymbol()) { - return CreateJSValue(native_context->symbol_function(), this); +Handle<HeapType> Object::OptimalType(Isolate* isolate, + Representation representation) { + if (representation.IsNone()) return HeapType::None(isolate); + if (FLAG_track_field_types) { + if (representation.IsHeapObject() && IsHeapObject()) { + // We can track only JavaScript objects with stable maps. 
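Editorial note: the new Object::OptimalType above narrows a field's type to a concrete class only when field-type tracking is enabled, the representation is a heap object, and the value's map is both stable and in the plain JS-object range; everything else degrades to Any. A small standalone sketch of that branching, with invented names (Rep, MapInfo, OptimalFieldType) standing in for the real Representation/Map/HeapType API:

#include <string>

// Invented stand-ins; only the branching mirrors the patched OptimalType().
enum class Rep { kNone, kSmi, kHeapObject, kTagged };

struct MapInfo {
  bool is_stable = false;
  bool is_plain_js_object = false;  // stands in for the instance-type range check
  std::string class_name;
};

// Returns a field-type label: "None", a concrete class, or "Any".
std::string OptimalFieldType(Rep representation, bool track_field_types,
                             const MapInfo* receiver_map) {
  if (representation == Rep::kNone) return "None";
  if (track_field_types && representation == Rep::kHeapObject &&
      receiver_map != nullptr && receiver_map->is_stable &&
      receiver_map->is_plain_js_object) {
    return receiver_map->class_name;  // models HeapType::Class(map, isolate)
  }
  return "Any";                       // models HeapType::Any(isolate)
}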
+ Handle<Map> map(HeapObject::cast(this)->map(), isolate); + if (map->is_stable() && + map->instance_type() >= FIRST_NONCALLABLE_SPEC_OBJECT_TYPE && + map->instance_type() <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE) { + return HeapType::Class(map, isolate); + } + } } - ASSERT(IsJSObject()); - return this; + return HeapType::Any(isolate); } -MaybeObject* Object::ToObject(Isolate* isolate) { - if (IsJSReceiver()) { - return this; - } else if (IsNumber()) { - Context* native_context = isolate->context()->native_context(); - return CreateJSValue(native_context->number_function(), this); - } else if (IsBoolean()) { - Context* native_context = isolate->context()->native_context(); - return CreateJSValue(native_context->boolean_function(), this); - } else if (IsString()) { - Context* native_context = isolate->context()->native_context(); - return CreateJSValue(native_context->string_function(), this); - } else if (IsSymbol()) { - Context* native_context = isolate->context()->native_context(); - return CreateJSValue(native_context->symbol_function(), this); +MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate, + Handle<Object> object, + Handle<Context> native_context) { + if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object); + Handle<JSFunction> constructor; + if (object->IsNumber()) { + constructor = handle(native_context->number_function(), isolate); + } else if (object->IsBoolean()) { + constructor = handle(native_context->boolean_function(), isolate); + } else if (object->IsString()) { + constructor = handle(native_context->string_function(), isolate); + } else if (object->IsSymbol()) { + constructor = handle(native_context->symbol_function(), isolate); + } else { + return MaybeHandle<JSReceiver>(); } - - // Throw a type error. - return Failure::InternalError(); + Handle<JSObject> result = isolate->factory()->NewJSObject(constructor); + Handle<JSValue>::cast(result)->set_value(*object); + return result; } @@ -132,7 +100,8 @@ bool Object::IsCallable() { } -void Object::Lookup(Name* name, LookupResult* result) { +void Object::Lookup(Handle<Name> name, LookupResult* result) { + DisallowHeapAllocation no_gc; Object* holder = NULL; if (IsJSReceiver()) { holder = this; @@ -156,31 +125,20 @@ void Object::Lookup(Name* name, LookupResult* result) { } -Handle<Object> Object::GetPropertyWithReceiver( +MaybeHandle<Object> Object::GetPropertyWithReceiver( Handle<Object> object, Handle<Object> receiver, Handle<Name> name, PropertyAttributes* attributes) { LookupResult lookup(name->GetIsolate()); - object->Lookup(*name, &lookup); - Handle<Object> result = + object->Lookup(name, &lookup); + MaybeHandle<Object> result = GetProperty(object, receiver, &lookup, name, attributes); ASSERT(*attributes <= ABSENT); return result; } -MaybeObject* Object::GetPropertyWithReceiver(Object* receiver, - Name* name, - PropertyAttributes* attributes) { - LookupResult result(name->GetIsolate()); - Lookup(name, &result); - MaybeObject* value = GetProperty(receiver, &result, name, attributes); - ASSERT(*attributes <= ABSENT); - return value; -} - - bool Object::ToInt32(int32_t* value) { if (IsSmi()) { *value = Smi::cast(this)->value(); @@ -249,9 +207,9 @@ static inline To* CheckedCast(void *from) { } -static MaybeObject* PerformCompare(const BitmaskCompareDescriptor& descriptor, - char* ptr, - Heap* heap) { +static Handle<Object> PerformCompare(const BitmaskCompareDescriptor& descriptor, + char* ptr, + Isolate* isolate) { uint32_t bitmask = descriptor.bitmask; uint32_t compare_value = descriptor.compare_value; 
uint32_t value; @@ -271,26 +229,27 @@ static MaybeObject* PerformCompare(const BitmaskCompareDescriptor& descriptor, break; default: UNREACHABLE(); - return NULL; + return isolate->factory()->undefined_value(); } - return heap->ToBoolean((bitmask & value) == (bitmask & compare_value)); + return isolate->factory()->ToBoolean( + (bitmask & value) == (bitmask & compare_value)); } -static MaybeObject* PerformCompare(const PointerCompareDescriptor& descriptor, - char* ptr, - Heap* heap) { +static Handle<Object> PerformCompare(const PointerCompareDescriptor& descriptor, + char* ptr, + Isolate* isolate) { uintptr_t compare_value = reinterpret_cast<uintptr_t>(descriptor.compare_value); uintptr_t value = *CheckedCast<uintptr_t>(ptr); - return heap->ToBoolean(compare_value == value); + return isolate->factory()->ToBoolean(compare_value == value); } -static MaybeObject* GetPrimitiveValue( +static Handle<Object> GetPrimitiveValue( const PrimitiveValueDescriptor& descriptor, char* ptr, - Heap* heap) { + Isolate* isolate) { int32_t int32_value = 0; switch (descriptor.data_type) { case kDescriptorInt8Type: @@ -310,29 +269,36 @@ static MaybeObject* GetPrimitiveValue( break; case kDescriptorUint32Type: { uint32_t value = *CheckedCast<uint32_t>(ptr); - return heap->NumberFromUint32(value); + AllowHeapAllocation allow_gc; + return isolate->factory()->NewNumberFromUint(value); } case kDescriptorBoolType: { uint8_t byte = *CheckedCast<uint8_t>(ptr); - return heap->ToBoolean(byte & (0x1 << descriptor.bool_offset)); + return isolate->factory()->ToBoolean( + byte & (0x1 << descriptor.bool_offset)); } case kDescriptorFloatType: { float value = *CheckedCast<float>(ptr); - return heap->NumberFromDouble(value); + AllowHeapAllocation allow_gc; + return isolate->factory()->NewNumber(value); } case kDescriptorDoubleType: { double value = *CheckedCast<double>(ptr); - return heap->NumberFromDouble(value); + AllowHeapAllocation allow_gc; + return isolate->factory()->NewNumber(value); } } - return heap->NumberFromInt32(int32_value); + AllowHeapAllocation allow_gc; + return isolate->factory()->NewNumberFromInt(int32_value); } -static MaybeObject* GetDeclaredAccessorProperty(Object* receiver, - DeclaredAccessorInfo* info, - Isolate* isolate) { - char* current = reinterpret_cast<char*>(receiver); +static Handle<Object> GetDeclaredAccessorProperty( + Handle<Object> receiver, + Handle<DeclaredAccessorInfo> info, + Isolate* isolate) { + DisallowHeapAllocation no_gc; + char* current = reinterpret_cast<char*>(*receiver); DeclaredAccessorDescriptorIterator iterator(info->descriptor()); while (true) { const DeclaredAccessorDescriptorData* data = iterator.Next(); @@ -340,7 +306,7 @@ static MaybeObject* GetDeclaredAccessorProperty(Object* receiver, case kDescriptorReturnObject: { ASSERT(iterator.Complete()); current = *CheckedCast<char*>(current); - return *CheckedCast<Object*>(current); + return handle(*CheckedCast<Object*>(current), isolate); } case kDescriptorPointerDereference: ASSERT(!iterator.Complete()); @@ -363,49 +329,44 @@ static MaybeObject* GetDeclaredAccessorProperty(Object* receiver, ASSERT(iterator.Complete()); return PerformCompare(data->bitmask_compare_descriptor, current, - isolate->heap()); + isolate); case kDescriptorPointerCompare: ASSERT(iterator.Complete()); return PerformCompare(data->pointer_compare_descriptor, current, - isolate->heap()); + isolate); case kDescriptorPrimitiveValue: ASSERT(iterator.Complete()); return GetPrimitiveValue(data->primitive_value_descriptor, current, - isolate->heap()); + isolate); } 
} UNREACHABLE(); - return NULL; + return isolate->factory()->undefined_value(); } Handle<FixedArray> JSObject::EnsureWritableFastElements( Handle<JSObject> object) { - CALL_HEAP_FUNCTION(object->GetIsolate(), - object->EnsureWritableFastElements(), - FixedArray); + ASSERT(object->HasFastSmiOrObjectElements()); + Isolate* isolate = object->GetIsolate(); + Handle<FixedArray> elems(FixedArray::cast(object->elements()), isolate); + if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems; + Handle<FixedArray> writable_elems = isolate->factory()->CopyFixedArrayWithMap( + elems, isolate->factory()->fixed_array_map()); + object->set_elements(*writable_elems); + isolate->counters()->cow_arrays_converted()->Increment(); + return writable_elems; } -Handle<Object> JSObject::GetPropertyWithCallback(Handle<JSObject> object, - Handle<Object> receiver, - Handle<Object> structure, - Handle<Name> name) { +MaybeHandle<Object> JSObject::GetPropertyWithCallback(Handle<JSObject> object, + Handle<Object> receiver, + Handle<Object> structure, + Handle<Name> name) { Isolate* isolate = name->GetIsolate(); - // To accommodate both the old and the new api we switch on the - // data structure used to store the callbacks. Eventually foreign - // callbacks should be phased out. - if (structure->IsForeign()) { - AccessorDescriptor* callback = - reinterpret_cast<AccessorDescriptor*>( - Handle<Foreign>::cast(structure)->foreign_address()); - CALL_HEAP_FUNCTION(isolate, - (callback->getter)(isolate, *receiver, callback->data), - Object); - } - + ASSERT(!structure->IsForeign()); // api style callbacks. if (structure->IsAccessorInfo()) { Handle<AccessorInfo> accessor_info = Handle<AccessorInfo>::cast(structure); @@ -415,19 +376,16 @@ Handle<Object> JSObject::GetPropertyWithCallback(Handle<JSObject> object, isolate->factory()->NewTypeError("incompatible_method_receiver", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>::null(); + return isolate->Throw<Object>(error); } // TODO(rossberg): Handling symbols in the API requires changing the API, // so we do not support it for now. 
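Editorial note: the handlified JSObject::EnsureWritableFastElements above spells out the copy-on-write check that the old CALL_HEAP_FUNCTION wrapper hid: if the elements array still uses the shared COW map it is copied before being handed out for writing. A standalone model of that idea, using a shared_ptr-backed store instead of V8 maps; Elements and ObjectModel are invented for illustration.

#include <memory>
#include <vector>

// Invented model of a fast-elements backing store that may be shared
// copy-on-write between objects.
struct Elements {
  std::vector<int> values;
  bool copy_on_write = false;  // models elements()->map() == fixed_cow_array_map()
};

struct ObjectModel {
  std::shared_ptr<Elements> elements;
};

// Hand out a writable backing store, copying the shared one on first write.
Elements* EnsureWritableElements(ObjectModel* object) {
  Elements* elems = object->elements.get();
  if (!elems->copy_on_write) return elems;             // already private
  auto writable = std::make_shared<Elements>(*elems);  // copy the COW array
  writable->copy_on_write = false;
  object->elements = writable;                         // models set_elements()
  return object->elements.get();
}

The copy happens at most once per object; after the first write the private backing store is returned directly.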
if (name->IsSymbol()) return isolate->factory()->undefined_value(); if (structure->IsDeclaredAccessorInfo()) { - CALL_HEAP_FUNCTION( - isolate, - GetDeclaredAccessorProperty(*receiver, - DeclaredAccessorInfo::cast(*structure), - isolate), - Object); + return GetDeclaredAccessorProperty( + receiver, + Handle<DeclaredAccessorInfo>::cast(structure), + isolate); } Handle<ExecutableAccessorInfo> data = @@ -436,20 +394,19 @@ Handle<Object> JSObject::GetPropertyWithCallback(Handle<JSObject> object, v8::ToCData<v8::AccessorGetterCallback>(data->getter()); if (call_fun == NULL) return isolate->factory()->undefined_value(); - HandleScope scope(isolate); - Handle<JSObject> self = Handle<JSObject>::cast(receiver); Handle<String> key = Handle<String>::cast(name); - LOG(isolate, ApiNamedPropertyAccess("load", *self, *name)); - PropertyCallbackArguments args(isolate, data->data(), *self, *object); + LOG(isolate, ApiNamedPropertyAccess("load", *object, *name)); + PropertyCallbackArguments args(isolate, data->data(), *receiver, *object); v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key)); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); if (result.IsEmpty()) { return isolate->factory()->undefined_value(); } Handle<Object> return_value = v8::Utils::OpenHandle(*result); return_value->VerifyApiCallResultType(); - return scope.CloseAndEscape(return_value); + // Rebox handle before return. + return handle(*return_value, isolate); } // __defineGetter__ callback @@ -457,102 +414,47 @@ Handle<Object> JSObject::GetPropertyWithCallback(Handle<JSObject> object, isolate); if (getter->IsSpecFunction()) { // TODO(rossberg): nicer would be to cast to some JSCallable here... - CALL_HEAP_FUNCTION( - isolate, - object->GetPropertyWithDefinedGetter(*receiver, - JSReceiver::cast(*getter)), - Object); + return Object::GetPropertyWithDefinedGetter( + object, receiver, Handle<JSReceiver>::cast(getter)); } // Getter is not a function. return isolate->factory()->undefined_value(); } -MaybeObject* JSProxy::GetPropertyWithHandler(Object* receiver_raw, - Name* name_raw) { - Isolate* isolate = GetIsolate(); - HandleScope scope(isolate); - Handle<Object> receiver(receiver_raw, isolate); - Handle<Object> name(name_raw, isolate); +MaybeHandle<Object> JSProxy::GetPropertyWithHandler(Handle<JSProxy> proxy, + Handle<Object> receiver, + Handle<Name> name) { + Isolate* isolate = proxy->GetIsolate(); // TODO(rossberg): adjust once there is a story for symbols vs proxies. - if (name->IsSymbol()) return isolate->heap()->undefined_value(); + if (name->IsSymbol()) return isolate->factory()->undefined_value(); Handle<Object> args[] = { receiver, name }; - Handle<Object> result = CallTrap( - "get", isolate->derived_get_trap(), ARRAY_SIZE(args), args); - if (isolate->has_pending_exception()) return Failure::Exception(); - - return *result; -} - - -Handle<Object> Object::GetProperty(Handle<Object> object, - Handle<Name> name) { - // TODO(rossberg): The index test should not be here but in the GetProperty - // method (or somewhere else entirely). Needs more global clean-up. 
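Editorial note: much of this hunk converts MaybeObject*/Failure-based returns into the MaybeHandle pattern, where an empty result signals a pending exception and callers either branch on ToHandle or use ASSIGN_RETURN_ON_EXCEPTION. The sketch below models only the shape of that API with a toy MaybeResult type over int; it is not V8's Handle implementation and ignores GC safety entirely.

#include <iostream>

// Toy version of the empty-means-exception result type.
template <typename T>
class MaybeResult {
 public:
  MaybeResult() : has_value_(false), value_() {}
  explicit MaybeResult(T value) : has_value_(true), value_(value) {}
  // Models MaybeHandle::ToHandle: returns false when an exception is pending.
  bool To(T* out) const {
    if (has_value_) *out = value_;
    return has_value_;
  }
 private:
  bool has_value_;
  T value_;
};

MaybeResult<int> ParsePositive(int raw) {
  if (raw < 0) return MaybeResult<int>();  // models isolate->Throw<Object>(...)
  return MaybeResult<int>(raw);
}

int main() {
  int value = 0;
  if (!ParsePositive(-1).To(&value)) {     // models ASSIGN_RETURN_ON_EXCEPTION
    std::cout << "exception path taken" << std::endl;
  }
  return 0;
}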
- uint32_t index; - Isolate* isolate = name->GetIsolate(); - if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index); - CALL_HEAP_FUNCTION(isolate, object->GetProperty(*name), Object); -} - - -MaybeObject* JSProxy::GetElementWithHandler(Object* receiver, - uint32_t index) { - String* name; - MaybeObject* maybe = GetHeap()->Uint32ToString(index); - if (!maybe->To<String>(&name)) return maybe; - return GetPropertyWithHandler(receiver, name); + return CallTrap( + proxy, "get", isolate->derived_get_trap(), ARRAY_SIZE(args), args); } -Handle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy, - Handle<JSReceiver> receiver, - uint32_t index, - Handle<Object> value, - StrictMode strict_mode) { - Isolate* isolate = proxy->GetIsolate(); - Handle<String> name = isolate->factory()->Uint32ToString(index); - return SetPropertyWithHandler( - proxy, receiver, name, value, NONE, strict_mode); -} - - -bool JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index) { - Isolate* isolate = proxy->GetIsolate(); - Handle<String> name = isolate->factory()->Uint32ToString(index); - return HasPropertyWithHandler(proxy, name); -} - - -MaybeObject* Object::GetPropertyWithDefinedGetter(Object* receiver, - JSReceiver* getter) { +MaybeHandle<Object> Object::GetPropertyWithDefinedGetter( + Handle<Object> object, + Handle<Object> receiver, + Handle<JSReceiver> getter) { Isolate* isolate = getter->GetIsolate(); - HandleScope scope(isolate); - Handle<JSReceiver> fun(getter); - Handle<Object> self(receiver, isolate); -#ifdef ENABLE_DEBUGGER_SUPPORT Debug* debug = isolate->debug(); // Handle stepping into a getter if step into is active. // TODO(rossberg): should this apply to getters that are function proxies? - if (debug->StepInActive() && fun->IsJSFunction()) { + if (debug->StepInActive() && getter->IsJSFunction()) { debug->HandleStepIn( - Handle<JSFunction>::cast(fun), Handle<Object>::null(), 0, false); + Handle<JSFunction>::cast(getter), Handle<Object>::null(), 0, false); } -#endif - bool has_pending_exception; - Handle<Object> result = Execution::Call( - isolate, fun, self, 0, NULL, &has_pending_exception, true); - // Check for pending exception and return the result. - if (has_pending_exception) return Failure::Exception(); - return *result; + return Execution::Call(isolate, getter, receiver, 0, NULL, true); } // Only deal with CALLBACKS and INTERCEPTOR -Handle<Object> JSObject::GetPropertyWithFailedAccessCheck( +MaybeHandle<Object> JSObject::GetPropertyWithFailedAccessCheck( Handle<JSObject> object, Handle<Object> receiver, LookupResult* result, @@ -582,7 +484,7 @@ Handle<Object> JSObject::GetPropertyWithFailedAccessCheck( case CONSTANT: { // Search ALL_CAN_READ accessors in prototype chain. LookupResult r(isolate); - result->holder()->LookupRealNamedPropertyInPrototypes(*name, &r); + result->holder()->LookupRealNamedPropertyInPrototypes(name, &r); if (r.IsProperty()) { return GetPropertyWithFailedAccessCheck( object, receiver, &r, name, attributes); @@ -593,7 +495,7 @@ Handle<Object> JSObject::GetPropertyWithFailedAccessCheck( // If the object has an interceptor, try real named properties. // No access check in GetPropertyAttributeWithInterceptor. 
LookupResult r(isolate); - result->holder()->LookupRealNamedProperty(*name, &r); + result->holder()->LookupRealNamedProperty(name, &r); if (r.IsProperty()) { return GetPropertyWithFailedAccessCheck( object, receiver, &r, name, attributes); @@ -607,8 +509,8 @@ Handle<Object> JSObject::GetPropertyWithFailedAccessCheck( // No accessible property found. *attributes = ABSENT; - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_GET); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + isolate->ReportFailedAccessCheck(object, v8::ACCESS_GET); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return isolate->factory()->undefined_value(); } @@ -643,7 +545,7 @@ PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck( if (!continue_search) break; // Search ALL_CAN_READ accessors in prototype chain. LookupResult r(object->GetIsolate()); - result->holder()->LookupRealNamedPropertyInPrototypes(*name, &r); + result->holder()->LookupRealNamedPropertyInPrototypes(name, &r); if (r.IsProperty()) { return GetPropertyAttributeWithFailedAccessCheck( object, &r, name, continue_search); @@ -656,9 +558,9 @@ PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck( // No access check in GetPropertyAttributeWithInterceptor. LookupResult r(object->GetIsolate()); if (continue_search) { - result->holder()->LookupRealNamedProperty(*name, &r); + result->holder()->LookupRealNamedProperty(name, &r); } else { - result->holder()->LocalLookupRealNamedProperty(*name, &r); + result->holder()->LocalLookupRealNamedProperty(name, &r); } if (!r.IsFound()) break; return GetPropertyAttributeWithFailedAccessCheck( @@ -666,13 +568,13 @@ PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck( } case HANDLER: - case TRANSITION: case NONEXISTENT: UNREACHABLE(); } } - object->GetIsolate()->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS); + object->GetIsolate()->ReportFailedAccessCheck(object, v8::ACCESS_HAS); + // TODO(yangguo): Issue 3269, check for scheduled exception missing? return ABSENT; } @@ -688,6 +590,20 @@ Object* JSObject::GetNormalizedProperty(const LookupResult* result) { } +Handle<Object> JSObject::GetNormalizedProperty(Handle<JSObject> object, + const LookupResult* result) { + ASSERT(!object->HasFastProperties()); + Isolate* isolate = object->GetIsolate(); + Handle<Object> value(object->property_dictionary()->ValueAt( + result->GetDictionaryEntry()), isolate); + if (object->IsGlobalObject()) { + value = Handle<Object>(Handle<PropertyCell>::cast(value)->value(), isolate); + } + ASSERT(!value->IsPropertyCell() && !value->IsCell()); + return value; +} + + void JSObject::SetNormalizedProperty(Handle<JSObject> object, const LookupResult* result, Handle<Object> value) { @@ -703,17 +619,6 @@ void JSObject::SetNormalizedProperty(Handle<JSObject> object, } -// TODO(mstarzinger): Temporary wrapper until handlified. 
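Editorial note: the new JSObject::GetNormalizedProperty above reads a dictionary-mode property and, for global objects, unwraps a PropertyCell so the stored value can change without rewriting the dictionary entry. Below is a standalone model of that indirection; for simplicity the model puts every value behind a cell, whereas in the patched code only global objects do. Cell and DictObject are invented names.

#include <map>
#include <memory>
#include <string>

// Invented cell type playing the role of a PropertyCell.
struct Cell {
  int value;
};

struct DictObject {
  // Models a NameDictionary keyed by property name.
  std::map<std::string, std::shared_ptr<Cell>> dict;

  void SetNormalized(const std::string& name, int value) {
    auto it = dict.find(name);
    if (it == dict.end()) {
      dict.emplace(name, std::make_shared<Cell>(Cell{value}));  // new entry
    } else {
      it->second->value = value;  // update through the existing cell
    }
  }

  int GetNormalized(const std::string& name) const {
    return dict.at(name)->value;  // unwrap the cell, as GetNormalizedProperty does
  }
};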
-static Handle<NameDictionary> NameDictionaryAdd(Handle<NameDictionary> dict, - Handle<Name> name, - Handle<Object> value, - PropertyDetails details) { - CALL_HEAP_FUNCTION(dict->GetIsolate(), - dict->Add(*name, *value, details), - NameDictionary); -} - - void JSObject::SetNormalizedProperty(Handle<JSObject> object, Handle<Name> name, Handle<Object> value, @@ -726,15 +631,15 @@ void JSObject::SetNormalizedProperty(Handle<JSObject> object, Handle<String>::cast(name)); } - int entry = property_dictionary->FindEntry(*name); + int entry = property_dictionary->FindEntry(name); if (entry == NameDictionary::kNotFound) { Handle<Object> store_value = value; if (object->IsGlobalObject()) { store_value = object->GetIsolate()->factory()->NewPropertyCell(value); } - property_dictionary = - NameDictionaryAdd(property_dictionary, name, store_value, details); + property_dictionary = NameDictionary::Add( + property_dictionary, name, store_value, details); object->set_properties(*property_dictionary); return; } @@ -760,25 +665,18 @@ void JSObject::SetNormalizedProperty(Handle<JSObject> object, // Please note we have to update the property details. property_dictionary->DetailsAtPut(entry, details); } else { - property_dictionary->SetEntry(entry, *name, *value, details); + property_dictionary->SetEntry(entry, name, value, details); } } -// TODO(mstarzinger): Temporary wrapper until target is handlified. -Handle<NameDictionary> NameDictionaryShrink(Handle<NameDictionary> dict, - Handle<Name> name) { - CALL_HEAP_FUNCTION(dict->GetIsolate(), dict->Shrink(*name), NameDictionary); -} - - Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object, Handle<Name> name, DeleteMode mode) { ASSERT(!object->HasFastProperties()); Isolate* isolate = object->GetIsolate(); Handle<NameDictionary> dictionary(object->property_dictionary()); - int entry = dictionary->FindEntry(*name); + int entry = dictionary->FindEntry(name); if (entry != NameDictionary::kNotFound) { // If we have a global object set the cell to the hole. if (object->IsGlobalObject()) { @@ -798,10 +696,11 @@ Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object, PropertyCell::SetValueInferType(cell, value); dictionary->DetailsAtPut(entry, details.AsDeleted()); } else { - Handle<Object> deleted(dictionary->DeleteProperty(entry, mode), isolate); + Handle<Object> deleted( + NameDictionary::DeleteProperty(dictionary, entry, mode)); if (*deleted == isolate->heap()->true_value()) { Handle<NameDictionary> new_properties = - NameDictionaryShrink(dictionary, name); + NameDictionary::Shrink(dictionary, name); object->set_properties(*new_properties); } return deleted; @@ -826,44 +725,13 @@ bool JSObject::IsDirty() { } -Handle<Object> Object::GetProperty(Handle<Object> object, - Handle<Object> receiver, - LookupResult* result, - Handle<Name> key, - PropertyAttributes* attributes) { - Isolate* isolate = result->isolate(); - CALL_HEAP_FUNCTION( - isolate, - object->GetProperty(*receiver, result, *key, attributes), - Object); -} - - -MaybeObject* Object::GetPropertyOrFail(Handle<Object> object, - Handle<Object> receiver, - LookupResult* result, - Handle<Name> key, - PropertyAttributes* attributes) { - Isolate* isolate = result->isolate(); - CALL_HEAP_FUNCTION_PASS_EXCEPTION( - isolate, - object->GetProperty(*receiver, result, *key, attributes)); -} - - -// TODO(yangguo): handlify this and get rid of. 
-MaybeObject* Object::GetProperty(Object* receiver, - LookupResult* result, - Name* name, - PropertyAttributes* attributes) { +MaybeHandle<Object> Object::GetProperty(Handle<Object> object, + Handle<Object> receiver, + LookupResult* result, + Handle<Name> name, + PropertyAttributes* attributes) { Isolate* isolate = name->GetIsolate(); - Heap* heap = isolate->heap(); - -#ifdef DEBUG - // TODO(mstarzinger): Only because of the AssertNoContextChange, drop as soon - // as this method has been fully handlified. - HandleScope scope(isolate); -#endif + Factory* factory = isolate->factory(); // Make sure that the top context does not change when doing // callbacks or interceptor calls. @@ -877,95 +745,77 @@ MaybeObject* Object::GetProperty(Object* receiver, // holder in the prototype chain. // Proxy handlers do not use the proxy's prototype, so we can skip this. if (!result->IsHandler()) { - Object* last = result->IsProperty() - ? result->holder() - : Object::cast(heap->null_value()); - ASSERT(this != this->GetPrototype(isolate)); - for (Object* current = this; + ASSERT(*object != object->GetPrototype(isolate)); + Handle<Object> last = result->IsProperty() + ? Handle<Object>(result->holder(), isolate) + : Handle<Object>::cast(factory->null_value()); + for (Handle<Object> current = object; true; - current = current->GetPrototype(isolate)) { + current = Handle<Object>(current->GetPrototype(isolate), isolate)) { if (current->IsAccessCheckNeeded()) { // Check if we're allowed to read from the current object. Note // that even though we may not actually end up loading the named // property from the current object, we still check that we have // access to it. - JSObject* checked = JSObject::cast(current); + Handle<JSObject> checked = Handle<JSObject>::cast(current); if (!isolate->MayNamedAccess(checked, name, v8::ACCESS_GET)) { - HandleScope scope(isolate); - Handle<Object> value = JSObject::GetPropertyWithFailedAccessCheck( - handle(checked, isolate), - handle(receiver, isolate), - result, - handle(name, isolate), - attributes); - RETURN_IF_EMPTY_HANDLE(isolate, value); - return *value; + return JSObject::GetPropertyWithFailedAccessCheck( + checked, receiver, result, name, attributes); } } // Stop traversing the chain once we reach the last object in the // chain; either the holder of the result or null in case of an // absent property. - if (current == last) break; + if (current.is_identical_to(last)) break; } } if (!result->IsProperty()) { *attributes = ABSENT; - return heap->undefined_value(); + return factory->undefined_value(); } *attributes = result->GetAttributes(); - Object* value; + + Handle<Object> value; switch (result->type()) { - case NORMAL: - value = result->holder()->GetNormalizedProperty(result); - ASSERT(!value->IsTheHole() || result->IsReadOnly()); - return value->IsTheHole() ? heap->undefined_value() : value; - case FIELD: { - MaybeObject* maybe_result = result->holder()->FastPropertyAt( - result->representation(), - result->GetFieldIndex().field_index()); - if (!maybe_result->To(&value)) return maybe_result; - ASSERT(!value->IsTheHole() || result->IsReadOnly()); - return value->IsTheHole() ? 
heap->undefined_value() : value; + case NORMAL: { + value = JSObject::GetNormalizedProperty( + handle(result->holder(), isolate), result); + break; } + case FIELD: + value = JSObject::FastPropertyAt(handle(result->holder(), isolate), + result->representation(), + result->GetFieldIndex().field_index()); + break; case CONSTANT: - return result->GetConstant(); - case CALLBACKS: { - HandleScope scope(isolate); - Handle<Object> value = JSObject::GetPropertyWithCallback( + return handle(result->GetConstant(), isolate); + case CALLBACKS: + return JSObject::GetPropertyWithCallback( handle(result->holder(), isolate), - handle(receiver, isolate), + receiver, handle(result->GetCallbackObject(), isolate), - handle(name, isolate)); - RETURN_IF_EMPTY_HANDLE(isolate, value); - return *value; - } + name); case HANDLER: - return result->proxy()->GetPropertyWithHandler(receiver, name); - case INTERCEPTOR: { - HandleScope scope(isolate); - Handle<Object> value = JSObject::GetPropertyWithInterceptor( - handle(result->holder(), isolate), - handle(receiver, isolate), - handle(name, isolate), - attributes); - RETURN_IF_EMPTY_HANDLE(isolate, value); - return *value; - } - case TRANSITION: + return JSProxy::GetPropertyWithHandler( + handle(result->proxy(), isolate), receiver, name); + case INTERCEPTOR: + return JSObject::GetPropertyWithInterceptor( + handle(result->holder(), isolate), receiver, name, attributes); case NONEXISTENT: UNREACHABLE(); break; } - UNREACHABLE(); - return NULL; + ASSERT(!value->IsTheHole() || result->IsReadOnly()); + return value->IsTheHole() ? Handle<Object>::cast(factory->undefined_value()) + : value; } -Handle<Object> Object::GetElementWithReceiver(Isolate* isolate, - Handle<Object> object, - Handle<Object> receiver, - uint32_t index) { +MaybeHandle<Object> Object::GetElementWithReceiver(Isolate* isolate, + Handle<Object> object, + Handle<Object> receiver, + uint32_t index) { Handle<Object> holder; // Iterate up the prototype chain until an element is found or the null @@ -988,10 +838,8 @@ Handle<Object> Object::GetElementWithReceiver(Isolate* isolate, holder = Handle<Object>( native_context->boolean_function()->instance_prototype(), isolate); } else if (holder->IsJSProxy()) { - CALL_HEAP_FUNCTION(isolate, - Handle<JSProxy>::cast(holder)->GetElementWithHandler( - *receiver, index), - Object); + return JSProxy::GetElementWithHandler( + Handle<JSProxy>::cast(holder), receiver, index); } else { // Undefined and null have no indexed properties. ASSERT(holder->IsUndefined() || holder->IsNull()); @@ -1006,9 +854,9 @@ Handle<Object> Object::GetElementWithReceiver(Isolate* isolate, // Check access rights if needed. 
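Editorial note: GetElementWithReceiver above walks the prototype chain, checking each holder's own elements and falling through to its prototype until the chain ends (primitive receivers are first redirected to their wrapper prototypes). A minimal standalone version of that walk over an invented ProtoObject type, without the access-check, proxy and interceptor branches:

#include <map>

// Invented object type with sparse indexed properties and a prototype link.
struct ProtoObject {
  std::map<unsigned, int> elements;
  ProtoObject* prototype = nullptr;
};

// Check each holder's own elements, then continue with its prototype until the
// end of the chain. Returns false (undefined in V8) when nothing is found.
bool GetElementWithChain(const ProtoObject* object, unsigned index, int* out) {
  for (const ProtoObject* holder = object; holder != nullptr;
       holder = holder->prototype) {
    auto it = holder->elements.find(index);
    if (it != holder->elements.end()) {
      *out = it->second;  // found on this holder, stop walking
      return true;
    }
  }
  return false;
}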
if (js_object->IsAccessCheckNeeded()) { - if (!isolate->MayIndexedAccessWrapper(js_object, index, v8::ACCESS_GET)) { - isolate->ReportFailedAccessCheckWrapper(js_object, v8::ACCESS_GET); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) { + isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return isolate->factory()->undefined_value(); } } @@ -1018,9 +866,11 @@ Handle<Object> Object::GetElementWithReceiver(Isolate* isolate, } if (js_object->elements() != isolate->heap()->empty_fixed_array()) { - Handle<Object> result = js_object->GetElementsAccessor()->Get( - receiver, js_object, index); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>()); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + js_object->GetElementsAccessor()->Get(receiver, js_object, index), + Object); if (!result->IsTheHole()) return result; } } @@ -1030,6 +880,7 @@ Handle<Object> Object::GetElementWithReceiver(Isolate* isolate, Object* Object::GetPrototype(Isolate* isolate) { + DisallowHeapAllocation no_alloc; if (IsSmi()) { Context* context = isolate->context()->native_context(); return context->number_function()->instance_prototype(); @@ -1061,6 +912,12 @@ Object* Object::GetPrototype(Isolate* isolate) { } +Handle<Object> Object::GetPrototype(Isolate* isolate, + Handle<Object> object) { + return handle(object->GetPrototype(isolate), isolate); +} + + Map* Object::GetMarkerMap(Isolate* isolate) { if (IsSmi()) return isolate->heap()->heap_number_map(); return HeapObject::cast(this)->map(); @@ -1131,8 +988,6 @@ void Object::ShortPrint(FILE* out) { void Object::ShortPrint(StringStream* accumulator) { if (IsSmi()) { Smi::cast(this)->SmiPrint(accumulator); - } else if (IsFailure()) { - Failure::cast(this)->FailurePrint(accumulator); } else { HeapObject::cast(this)->HeapObjectShortPrint(accumulator); } @@ -1149,16 +1004,6 @@ void Smi::SmiPrint(StringStream* accumulator) { } -void Failure::FailurePrint(StringStream* accumulator) { - accumulator->Add("Failure(%p)", reinterpret_cast<void*>(value())); -} - - -void Failure::FailurePrint(FILE* out) { - PrintF(out, "Failure(%p)", reinterpret_cast<void*>(value())); -} - - // Should a word be prefixed by 'a' or 'an' in order to read naturally in // English? Returns false for non-ASCII or words that don't start with // a capital letter. The a/an rule follows pronunciation in English. @@ -1183,70 +1028,36 @@ static bool AnWord(String* str) { } -MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) { -#ifdef DEBUG - // Do not attempt to flatten in debug mode when allocation is not - // allowed. This is to avoid an assertion failure when allocating. - // Flattening strings is the only case where we always allow - // allocation because no GC is performed if the allocation fails. - if (!AllowHeapAllocation::IsAllowed()) return this; -#endif - - Heap* heap = GetHeap(); - switch (StringShape(this).representation_tag()) { - case kConsStringTag: { - ConsString* cs = ConsString::cast(this); - if (cs->second()->length() == 0) { - return cs->first(); - } - // There's little point in putting the flat string in new space if the - // cons string is in old space. It can never get GCed until there is - // an old space GC. - PretenureFlag tenure = heap->InNewSpace(this) ? 
pretenure : TENURED; - int len = length(); - Object* object; - String* result; - if (IsOneByteRepresentation()) { - { MaybeObject* maybe_object = - heap->AllocateRawOneByteString(len, tenure); - if (!maybe_object->ToObject(&object)) return maybe_object; - } - result = String::cast(object); - String* first = cs->first(); - int first_length = first->length(); - uint8_t* dest = SeqOneByteString::cast(result)->GetChars(); - WriteToFlat(first, dest, 0, first_length); - String* second = cs->second(); - WriteToFlat(second, - dest + first_length, - 0, - len - first_length); - } else { - { MaybeObject* maybe_object = - heap->AllocateRawTwoByteString(len, tenure); - if (!maybe_object->ToObject(&object)) return maybe_object; - } - result = String::cast(object); - uc16* dest = SeqTwoByteString::cast(result)->GetChars(); - String* first = cs->first(); - int first_length = first->length(); - WriteToFlat(first, dest, 0, first_length); - String* second = cs->second(); - WriteToFlat(second, - dest + first_length, - 0, - len - first_length); - } - cs->set_first(result); - cs->set_second(heap->empty_string(), SKIP_WRITE_BARRIER); - return result; - } - default: - return this; +Handle<String> String::SlowFlatten(Handle<ConsString> cons, + PretenureFlag pretenure) { + ASSERT(AllowHeapAllocation::IsAllowed()); + ASSERT(cons->second()->length() != 0); + Isolate* isolate = cons->GetIsolate(); + int length = cons->length(); + PretenureFlag tenure = isolate->heap()->InNewSpace(*cons) ? pretenure + : TENURED; + Handle<SeqString> result; + if (cons->IsOneByteRepresentation()) { + Handle<SeqOneByteString> flat = isolate->factory()->NewRawOneByteString( + length, tenure).ToHandleChecked(); + DisallowHeapAllocation no_gc; + WriteToFlat(*cons, flat->GetChars(), 0, length); + result = flat; + } else { + Handle<SeqTwoByteString> flat = isolate->factory()->NewRawTwoByteString( + length, tenure).ToHandleChecked(); + DisallowHeapAllocation no_gc; + WriteToFlat(*cons, flat->GetChars(), 0, length); + result = flat; } + cons->set_first(*result); + cons->set_second(isolate->heap()->empty_string()); + ASSERT(result->IsFlat()); + return result; } + bool String::MakeExternal(v8::String::ExternalStringResource* resource) { // Externalizing twice leaks the external resource, so it's // prohibited by the API. @@ -1280,34 +1091,39 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) { // In either case we resort to a short external string instead, omitting // the field caching the address of the backing store. When we encounter // short external strings in generated code, we need to bailout to runtime. + Map* new_map; if (size < ExternalString::kSize || heap->old_pointer_space()->Contains(this)) { - this->set_map_no_write_barrier( - is_internalized - ? (is_ascii - ? heap-> - short_external_internalized_string_with_one_byte_data_map() - : heap->short_external_internalized_string_map()) - : (is_ascii - ? heap->short_external_string_with_one_byte_data_map() - : heap->short_external_string_map())); + new_map = is_internalized + ? (is_ascii + ? heap-> + short_external_internalized_string_with_one_byte_data_map() + : heap->short_external_internalized_string_map()) + : (is_ascii + ? heap->short_external_string_with_one_byte_data_map() + : heap->short_external_string_map()); } else { - this->set_map_no_write_barrier( - is_internalized - ? (is_ascii - ? heap->external_internalized_string_with_one_byte_data_map() - : heap->external_internalized_string_map()) - : (is_ascii - ? 
heap->external_string_with_one_byte_data_map() - : heap->external_string_map())); + new_map = is_internalized + ? (is_ascii + ? heap->external_internalized_string_with_one_byte_data_map() + : heap->external_internalized_string_map()) + : (is_ascii + ? heap->external_string_with_one_byte_data_map() + : heap->external_string_map()); } + + // Byte size of the external String object. + int new_size = this->SizeFromMap(new_map); + heap->CreateFillerObjectAt(this->address() + new_size, size - new_size); + + // We are storing the new map using release store after creating a filler for + // the left-over space to avoid races with the sweeper thread. + this->synchronized_set_map(new_map); + ExternalTwoByteString* self = ExternalTwoByteString::cast(this); self->set_resource(resource); if (is_internalized) self->Hash(); // Force regeneration of the hash value. - // Fill the remainder of the string with dead wood. - int new_size = this->Size(); // Byte size of the external String object. - heap->CreateFillerObjectAt(this->address() + new_size, size - new_size); heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR); return true; } @@ -1347,23 +1163,30 @@ bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) { // In either case we resort to a short external string instead, omitting // the field caching the address of the backing store. When we encounter // short external strings in generated code, we need to bailout to runtime. + Map* new_map; if (size < ExternalString::kSize || heap->old_pointer_space()->Contains(this)) { - this->set_map_no_write_barrier( - is_internalized ? heap->short_external_ascii_internalized_string_map() - : heap->short_external_ascii_string_map()); + new_map = is_internalized + ? heap->short_external_ascii_internalized_string_map() + : heap->short_external_ascii_string_map(); } else { - this->set_map_no_write_barrier( - is_internalized ? heap->external_ascii_internalized_string_map() - : heap->external_ascii_string_map()); + new_map = is_internalized + ? heap->external_ascii_internalized_string_map() + : heap->external_ascii_string_map(); } + + // Byte size of the external String object. + int new_size = this->SizeFromMap(new_map); + heap->CreateFillerObjectAt(this->address() + new_size, size - new_size); + + // We are storing the new map using release store after creating a filler for + // the left-over space to avoid races with the sweeper thread. + this->synchronized_set_map(new_map); + ExternalAsciiString* self = ExternalAsciiString::cast(this); self->set_resource(resource); if (is_internalized) self->Hash(); // Force regeneration of the hash value. - // Fill the remainder of the string with dead wood. - int new_size = this->Size(); // Byte size of the external String object. 
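Editorial note: both MakeExternal variants in this hunk now size the object from the new map, write a filler over the freed tail, and only then install the new map with a release store (synchronized_set_map) so the concurrent sweeper never observes the smaller map before the filler exists. Below is a standalone sketch of that ordering using std::atomic; Shape and ObjectSlot are invented, and the real code uses V8's own heap primitives rather than standard-library atomics.

#include <atomic>
#include <cstddef>

// Invented shape/object types. The point is only the ordering: format the
// freed tail first, then publish the smaller shape with a release store.
struct Shape {
  std::size_t size;  // instance size implied by this shape
};

struct ObjectSlot {
  std::atomic<const Shape*> map{nullptr};
  unsigned char body[64];  // assume all sizes used below fit in here
};

void ShrinkInPlace(ObjectSlot* object, const Shape* new_map,
                   std::size_t old_size) {
  std::size_t new_size = new_map->size;
  // Turn the leftover space into filler (models CreateFillerObjectAt).
  for (std::size_t i = new_size; i < old_size; ++i) object->body[i] = 0;
  // Publish the new shape only after the filler is in place, so a reader that
  // loads the map with acquire semantics also sees the formatted tail.
  object->map.store(new_map, std::memory_order_release);
}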
- heap->CreateFillerObjectAt(this->address() + new_size, size - new_size); heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR); return true; } @@ -1560,7 +1383,9 @@ void Map::PrintGeneralization(FILE* file, int descriptors, bool constant_to_field, Representation old_representation, - Representation new_representation) { + Representation new_representation, + HeapType* old_field_type, + HeapType* new_field_type) { PrintF(file, "[generalizing "); constructor_name()->PrintOn(file); PrintF(file, "] "); @@ -1570,13 +1395,19 @@ void Map::PrintGeneralization(FILE* file, } else { PrintF(file, "{symbol %p}", static_cast<void*>(name)); } + PrintF(file, ":"); if (constant_to_field) { - PrintF(file, ":c->f"); + PrintF(file, "c"); } else { - PrintF(file, ":%s->%s", - old_representation.Mnemonic(), - new_representation.Mnemonic()); - } + PrintF(file, "%s", old_representation.Mnemonic()); + PrintF(file, "{"); + old_field_type->TypePrint(file, HeapType::SEMANTIC_DIM); + PrintF(file, "}"); + } + PrintF(file, "->%s", new_representation.Mnemonic()); + PrintF(file, "{"); + new_field_type->TypePrint(file, HeapType::SEMANTIC_DIM); + PrintF(file, "}"); PrintF(file, " ("); if (strlen(reason) > 0) { PrintF(file, "%s", reason); @@ -1805,6 +1636,8 @@ void HeapObject::IterateBody(InstanceType type, int object_size, case JS_DATA_VIEW_TYPE: case JS_SET_TYPE: case JS_MAP_TYPE: + case JS_SET_ITERATOR_TYPE: + case JS_MAP_ITERATOR_TYPE: case JS_WEAK_MAP_TYPE: case JS_WEAK_SET_TYPE: case JS_REGEXP_TYPE: @@ -1954,28 +1787,38 @@ String* JSReceiver::constructor_name() { } -// TODO(mstarzinger): Temporary wrapper until handlified. -static Handle<Object> NewStorageFor(Isolate* isolate, - Handle<Object> object, - Representation representation) { - Heap* heap = isolate->heap(); - CALL_HEAP_FUNCTION(isolate, - object->AllocateNewStorageFor(heap, representation), - Object); -} +MaybeHandle<Map> Map::CopyWithField(Handle<Map> map, + Handle<Name> name, + Handle<HeapType> type, + PropertyAttributes attributes, + Representation representation, + TransitionFlag flag) { + ASSERT(DescriptorArray::kNotFound == + map->instance_descriptors()->Search( + *name, map->NumberOfOwnDescriptors())); + // Ensure the descriptor array does not get too big. + if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) { + return MaybeHandle<Map>(); + } -static MaybeObject* CopyAddFieldDescriptor(Map* map, - Name* name, - int index, - PropertyAttributes attributes, - Representation representation, - TransitionFlag flag) { - Map* new_map; - FieldDescriptor new_field_desc(name, index, attributes, representation); - MaybeObject* maybe_map = map->CopyAddDescriptor(&new_field_desc, flag); - if (!maybe_map->To(&new_map)) return maybe_map; - int unused_property_fields = map->unused_property_fields() - 1; + // Normalize the object if the name is an actual name (not the + // hidden strings) and is not a real identifier. + // Normalize the object if it will have too many fast properties. + Isolate* isolate = map->GetIsolate(); + if (!name->IsCacheable(isolate)) return MaybeHandle<Map>(); + + // Compute the new index for new field. 
+ int index = map->NextFreePropertyIndex(); + + if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) { + representation = Representation::Tagged(); + type = HeapType::Any(isolate); + } + + FieldDescriptor new_field_desc(name, index, type, attributes, representation); + Handle<Map> new_map = Map::CopyAddDescriptor(map, &new_field_desc, flag); + int unused_property_fields = new_map->unused_property_fields() - 1; if (unused_property_fields < 0) { unused_property_fields += JSObject::kFieldsAdded; } @@ -1984,16 +1827,19 @@ static MaybeObject* CopyAddFieldDescriptor(Map* map, } -static Handle<Map> CopyAddFieldDescriptor(Handle<Map> map, - Handle<Name> name, - int index, - PropertyAttributes attributes, - Representation representation, - TransitionFlag flag) { - CALL_HEAP_FUNCTION(map->GetIsolate(), - CopyAddFieldDescriptor( - *map, *name, index, attributes, representation, flag), - Map); +MaybeHandle<Map> Map::CopyWithConstant(Handle<Map> map, + Handle<Name> name, + Handle<Object> constant, + PropertyAttributes attributes, + TransitionFlag flag) { + // Ensure the descriptor array does not get too big. + if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) { + return MaybeHandle<Map>(); + } + + // Allocate new instance descriptors with (name, constant) added. + ConstantDescriptor new_constant_desc(name, constant, attributes); + return Map::CopyAddDescriptor(map, &new_constant_desc, flag); } @@ -2005,84 +1851,27 @@ void JSObject::AddFastProperty(Handle<JSObject> object, ValueType value_type, TransitionFlag flag) { ASSERT(!object->IsJSGlobalProxy()); - ASSERT(DescriptorArray::kNotFound == - object->map()->instance_descriptors()->Search( - *name, object->map()->NumberOfOwnDescriptors())); - // Normalize the object if the name is an actual name (not the - // hidden strings) and is not a real identifier. - // Normalize the object if it will have too many fast properties. - Isolate* isolate = object->GetIsolate(); - if (!name->IsCacheable(isolate) || - object->TooManyFastProperties(store_mode)) { - NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0); - AddSlowProperty(object, name, value, attributes); - return; + MaybeHandle<Map> maybe_map; + if (value->IsJSFunction()) { + maybe_map = Map::CopyWithConstant( + handle(object->map()), name, value, attributes, flag); + } else if (!object->TooManyFastProperties(store_mode)) { + Isolate* isolate = object->GetIsolate(); + Representation representation = value->OptimalRepresentation(value_type); + maybe_map = Map::CopyWithField( + handle(object->map(), isolate), name, + value->OptimalType(isolate, representation), + attributes, representation, flag); } - // Compute the new index for new field. - int index = object->map()->NextFreePropertyIndex(); - - // Allocate new instance descriptors with (name, index) added - if (object->IsJSContextExtensionObject()) value_type = FORCE_TAGGED; - Representation representation = value->OptimalRepresentation(value_type); - Handle<Map> new_map = CopyAddFieldDescriptor( - handle(object->map()), name, index, attributes, representation, flag); - - JSObject::MigrateToMap(object, new_map); - - if (representation.IsDouble()) { - // Nothing more to be done. 
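Editorial note: the rewritten AddFastProperty above tries a constant descriptor for function values, a field descriptor when the object does not already have too many fast properties, and normalizes to dictionary mode when neither CopyWithConstant nor CopyWithField yields a map. A small standalone model of that decision; every name here is invented and the attribute, store-mode and transition details are left out.

// Invented inputs and outcome labels for the decision only.
enum class PropertyKind { kConstant, kField, kNormalized };

struct AddDecision {
  bool value_is_function = false;
  bool too_many_fast_properties = false;
  bool descriptor_array_full = false;  // models hitting kMaxNumberOfDescriptors
};

PropertyKind DecideAddFastProperty(const AddDecision& in) {
  if (in.descriptor_array_full) {
    return PropertyKind::kNormalized;   // no new map can be built
  }
  if (in.value_is_function) {
    return PropertyKind::kConstant;     // Map::CopyWithConstant path
  }
  if (!in.too_many_fast_properties) {
    return PropertyKind::kField;        // Map::CopyWithField path
  }
  return PropertyKind::kNormalized;     // fall back to dictionary mode
}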
- if (value->IsUninitialized()) return; - HeapNumber* box = HeapNumber::cast(object->RawFastPropertyAt(index)); - box->set_value(value->Number()); - } else { - object->FastPropertyAtPut(index, *value); + Handle<Map> new_map; + if (!maybe_map.ToHandle(&new_map)) { + NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0); + return; } -} - - -static MaybeObject* CopyAddConstantDescriptor(Map* map, - Name* name, - Object* value, - PropertyAttributes attributes, - TransitionFlag flag) { - ConstantDescriptor new_constant_desc(name, value, attributes); - return map->CopyAddDescriptor(&new_constant_desc, flag); -} - - -static Handle<Map> CopyAddConstantDescriptor(Handle<Map> map, - Handle<Name> name, - Handle<Object> value, - PropertyAttributes attributes, - TransitionFlag flag) { - CALL_HEAP_FUNCTION(map->GetIsolate(), - CopyAddConstantDescriptor( - *map, *name, *value, attributes, flag), - Map); -} - - -void JSObject::AddConstantProperty(Handle<JSObject> object, - Handle<Name> name, - Handle<Object> constant, - PropertyAttributes attributes, - TransitionFlag initial_flag) { - TransitionFlag flag = - // Do not add transitions to global objects. - (object->IsGlobalObject() || - // Don't add transitions to special properties with non-trivial - // attributes. - attributes != NONE) - ? OMIT_TRANSITION - : initial_flag; - - // Allocate new instance descriptors with (name, constant) added. - Handle<Map> new_map = CopyAddConstantDescriptor( - handle(object->map()), name, constant, attributes, flag); - JSObject::MigrateToMap(object, new_map); + JSObject::MigrateToNewProperty(object, new_map, value); } @@ -2095,7 +1884,7 @@ void JSObject::AddSlowProperty(Handle<JSObject> object, Handle<NameDictionary> dict(object->property_dictionary()); if (object->IsGlobalObject()) { // In case name is an orphaned property reuse the cell. 
- int entry = dict->FindEntry(*name); + int entry = dict->FindEntry(name); if (entry != NameDictionary::kNotFound) { Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry))); PropertyCell::SetValueInferType(cell, value); @@ -2104,7 +1893,7 @@ void JSObject::AddSlowProperty(Handle<JSObject> object, int index = dict->NextEnumerationIndex(); PropertyDetails details = PropertyDetails(attributes, NORMAL, index); dict->SetNextEnumerationIndex(index + 1); - dict->SetEntry(entry, *name, *cell, details); + dict->SetEntry(entry, name, cell, details); return; } Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value); @@ -2112,21 +1901,23 @@ void JSObject::AddSlowProperty(Handle<JSObject> object, value = cell; } PropertyDetails details = PropertyDetails(attributes, NORMAL, 0); - Handle<NameDictionary> result = NameDictionaryAdd(dict, name, value, details); + Handle<NameDictionary> result = + NameDictionary::Add(dict, name, value, details); if (*dict != *result) object->set_properties(*result); } -Handle<Object> JSObject::AddProperty(Handle<JSObject> object, - Handle<Name> name, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode, - JSReceiver::StoreFromKeyed store_mode, - ExtensibilityCheck extensibility_check, - ValueType value_type, - StoreMode mode, - TransitionFlag transition_flag) { +MaybeHandle<Object> JSObject::AddProperty( + Handle<JSObject> object, + Handle<Name> name, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode, + JSReceiver::StoreFromKeyed store_mode, + ExtensibilityCheck extensibility_check, + ValueType value_type, + StoreMode mode, + TransitionFlag transition_flag) { ASSERT(!object->IsJSGlobalProxy()); Isolate* isolate = object->GetIsolate(); @@ -2143,31 +1934,16 @@ Handle<Object> JSObject::AddProperty(Handle<JSObject> object, Handle<Object> args[1] = { name }; Handle<Object> error = isolate->factory()->NewTypeError( "object_not_extensible", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } } if (object->HasFastProperties()) { - // Ensure the descriptor array does not get too big. - if (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors) { - // TODO(verwaest): Support other constants. - // if (mode == ALLOW_AS_CONSTANT && - // !value->IsTheHole() && - // !value->IsConsString()) { - if (value->IsJSFunction()) { - AddConstantProperty(object, name, value, attributes, transition_flag); - } else { - AddFastProperty(object, name, value, attributes, store_mode, - value_type, transition_flag); - } - } else { - // Normalize the object to prevent very large instance descriptors. - // This eliminates unwanted N^2 allocation and lookup behavior. - NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0); - AddSlowProperty(object, name, value, attributes); - } - } else { + AddFastProperty(object, name, value, attributes, store_mode, + value_type, transition_flag); + } + + if (!object->HasFastProperties()) { AddSlowProperty(object, name, value, attributes); } @@ -2181,38 +1957,50 @@ Handle<Object> JSObject::AddProperty(Handle<JSObject> object, } +Context* JSObject::GetCreationContext() { + Object* constructor = this->map()->constructor(); + JSFunction* function; + if (!constructor->IsJSFunction()) { + // Functions have null as a constructor, + // but any JSFunction knows its context immediately. 
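The hunk above also changes JSObject::AddProperty (and, further down, the other SetProperty* paths) to return MaybeHandle<Object>, where an empty result means an exception is already pending on the isolate; isolate->Throw<Object>(error) is the throwing form that produces such an empty handle, and callers propagate it with RETURN_ON_EXCEPTION / ASSIGN_RETURN_ON_EXCEPTION. A hedged sketch of that propagation idiom; Compute and PropagateOrUse are hypothetical stand-ins, only the macro and MaybeHandle come from this code:

// Hypothetical operation standing in for any MaybeHandle<Object>-returning
// helper in this file (AddProperty, SetPropertyPostInterceptor, ...).
MaybeHandle<Object> Compute(Isolate* isolate);

MaybeHandle<Object> PropagateOrUse(Isolate* isolate) {
  Handle<Object> result;
  // On an empty MaybeHandle (pending exception) this returns an empty
  // MaybeHandle<Object> to our own caller; otherwise |result| is assigned.
  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, Compute(isolate), Object);
  return result;
}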
+ function = JSFunction::cast(this); + } else { + function = JSFunction::cast(constructor); + } + + return function->context()->native_context(); +} + + void JSObject::EnqueueChangeRecord(Handle<JSObject> object, const char* type_str, Handle<Name> name, Handle<Object> old_value) { + ASSERT(!object->IsJSGlobalProxy()); + ASSERT(!object->IsJSGlobalObject()); Isolate* isolate = object->GetIsolate(); HandleScope scope(isolate); Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str); - if (object->IsJSGlobalObject()) { - object = handle(JSGlobalObject::cast(*object)->global_receiver(), isolate); - } Handle<Object> args[] = { type, object, name, old_value }; int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4; - bool threw; Execution::Call(isolate, Handle<JSFunction>(isolate->observers_notify_change()), isolate->factory()->undefined_value(), - argc, args, - &threw); - ASSERT(!threw); + argc, args).Assert(); } -Handle<Object> JSObject::SetPropertyPostInterceptor( +MaybeHandle<Object> JSObject::SetPropertyPostInterceptor( Handle<JSObject> object, Handle<Name> name, Handle<Object> value, PropertyAttributes attributes, StrictMode strict_mode) { // Check local property, ignore interceptor. - LookupResult result(object->GetIsolate()); - object->LocalLookupRealNamedProperty(*name, &result); + Isolate* isolate = object->GetIsolate(); + LookupResult result(isolate); + object->LocalLookupRealNamedProperty(name, &result); if (!result.IsFound()) { object->map()->LookupTransition(*object, *name, &result); } @@ -2223,8 +2011,12 @@ Handle<Object> JSObject::SetPropertyPostInterceptor( strict_mode, MAY_BE_STORE_FROM_KEYED); } bool done = false; - Handle<Object> result_object = SetPropertyViaPrototypes( - object, name, value, attributes, strict_mode, &done); + Handle<Object> result_object; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result_object, + SetPropertyViaPrototypes( + object, name, value, attributes, strict_mode, &done), + Object); if (done) return result_object; // Add a new real property. return AddProperty(object, name, value, attributes, strict_mode); @@ -2236,7 +2028,7 @@ static void ReplaceSlowProperty(Handle<JSObject> object, Handle<Object> value, PropertyAttributes attributes) { NameDictionary* dictionary = object->property_dictionary(); - int old_index = dictionary->FindEntry(*name); + int old_index = dictionary->FindEntry(name); int new_enumeration_index = 0; // 0 means "Use the next available index." if (old_index != -1) { // All calls to ReplaceSlowProperty have had all transitions removed. @@ -2297,7 +2089,9 @@ static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) { // we still do it. heap->CreateFillerObjectAt(new_end, size_delta); - elms->set_length(len - to_trim); + // We are storing the new length using release store after creating a filler + // for the left-over space to avoid races with the sweeper thread. + elms->synchronized_set_length(len - to_trim); heap->AdjustLiveBytes(elms->address(), -size_delta, mode); @@ -2346,6 +2140,18 @@ bool Map::InstancesNeedRewriting(Map* target, } +Handle<TransitionArray> Map::SetElementsTransitionMap( + Handle<Map> map, Handle<Map> transitioned_map) { + Handle<TransitionArray> transitions = TransitionArray::CopyInsert( + map, + map->GetIsolate()->factory()->elements_transition_symbol(), + transitioned_map, + FULL_TRANSITION); + map->set_transitions(*transitions); + return transitions; +} + + // To migrate an instance to a map: // - First check whether the instance needs to be rewritten. 
If not, simply // change the map. @@ -2372,7 +2178,9 @@ void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) { // converted to doubles. if (!old_map->InstancesNeedRewriting( *new_map, number_of_fields, inobject, unused)) { - object->set_map(*new_map); + // Writing the new map here does not require synchronization since it does + // not change the actual object size. + object->synchronized_set_map(*new_map); return; } @@ -2408,7 +2216,7 @@ void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) { if (old_details.representation().IsNone()) { value = handle(Smi::FromInt(0), isolate); } - value = NewStorageFor(isolate, value, details.representation()); + value = Object::NewStorageFor(isolate, value, details.representation()); } ASSERT(!(details.representation().IsDouble() && value->IsSmi())); int target_index = new_descriptors->GetFieldIndex(i) - inobject; @@ -2419,12 +2227,15 @@ void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) { for (int i = old_nof; i < new_nof; i++) { PropertyDetails details = new_descriptors->GetDetails(i); if (details.type() != FIELD) continue; + Handle<Object> value; if (details.representation().IsDouble()) { - int target_index = new_descriptors->GetFieldIndex(i) - inobject; - if (target_index < 0) target_index += total_size; - Handle<Object> box = isolate->factory()->NewHeapNumber(0); - array->set(target_index, *box); + value = isolate->factory()->NewHeapNumber(0); + } else { + value = isolate->factory()->uninitialized_value(); } + int target_index = new_descriptors->GetFieldIndex(i) - inobject; + if (target_index < 0) target_index += total_size; + array->set(target_index, *value); } // From here on we cannot fail and we shouldn't GC anymore. @@ -2442,6 +2253,11 @@ void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) { int instance_size_delta = old_map->instance_size() - new_instance_size; ASSERT(instance_size_delta >= 0); Address address = object->address() + new_instance_size; + + // The trimming is performed on a newly allocated object, which is on a + // fresly allocated page or on an already swept page. Hence, the sweeper + // thread can not get confused with the filler creation. No synchronization + // needed. isolate->heap()->CreateFillerObjectAt(address, instance_size_delta); // If there are properties in the new backing store, trim it to the correct @@ -2451,26 +2267,22 @@ void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) { object->set_properties(*array); } + // The trimming is performed on a newly allocated object, which is on a + // fresly allocated page or on an already swept page. Hence, the sweeper + // thread can not get confused with the filler creation. No synchronization + // needed. 
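The comments above, together with the release store added to RightTrimFixedArray earlier in this hunk, protect one invariant: any memory the concurrent sweeper might scan must already be formatted (filler objects created) before the shrunken length or the new map is published. A toy model of that ordering using plain std::atomic, only to illustrate the release/acquire pairing; V8 itself uses its synchronized_set_length()/synchronized_set_map() accessors, not std::atomic:

#include <atomic>
#include <cstddef>

// Toy stand-in for a length-prefixed array that a sweeper thread scans.
struct ToyArray {
  std::atomic<std::size_t> length{16};
  int elements[16] = {};
};

void Trim(ToyArray* a, std::size_t new_length) {
  std::size_t old_length = a->length.load(std::memory_order_relaxed);
  for (std::size_t i = new_length; i < old_length; ++i) {
    a->elements[i] = 0;  // "create a filler" over the trimmed tail first
  }
  // Publish the new length last, with release semantics, so a sweeper that
  // acquires it never reads past initialized memory.
  a->length.store(new_length, std::memory_order_release);
}

std::size_t SweeperVisibleLength(const ToyArray* a) {
  return a->length.load(std::memory_order_acquire);  // pairs with the release
}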
object->set_map(*new_map); } -Handle<TransitionArray> Map::AddTransition(Handle<Map> map, - Handle<Name> key, - Handle<Map> target, - SimpleTransitionFlag flag) { - CALL_HEAP_FUNCTION(map->GetIsolate(), - map->AddTransition(*key, *target, flag), - TransitionArray); -} - - void JSObject::GeneralizeFieldRepresentation(Handle<JSObject> object, int modify_index, Representation new_representation, + Handle<HeapType> new_field_type, StoreMode store_mode) { Handle<Map> new_map = Map::GeneralizeRepresentation( - handle(object->map()), modify_index, new_representation, store_mode); + handle(object->map()), modify_index, new_representation, + new_field_type, store_mode); if (object->map() == *new_map) return; return MigrateToMap(object, new_map); } @@ -2491,20 +2303,26 @@ Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map, StoreMode store_mode, PropertyAttributes attributes, const char* reason) { + Isolate* isolate = map->GetIsolate(); Handle<Map> new_map = Copy(map); DescriptorArray* descriptors = new_map->instance_descriptors(); - descriptors->InitializeRepresentations(Representation::Tagged()); + int length = descriptors->number_of_descriptors(); + for (int i = 0; i < length; i++) { + descriptors->SetRepresentation(i, Representation::Tagged()); + if (descriptors->GetDetails(i).type() == FIELD) { + descriptors->SetValue(i, HeapType::Any()); + } + } // Unless the instance is being migrated, ensure that modify_index is a field. PropertyDetails details = descriptors->GetDetails(modify_index); if (store_mode == FORCE_FIELD && details.type() != FIELD) { - FieldDescriptor d(descriptors->GetKey(modify_index), + FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate), new_map->NumberOfFields(), attributes, Representation::Tagged()); - d.SetSortedKeyIndex(details.pointer()); - descriptors->Set(modify_index, &d); + descriptors->Replace(modify_index, &d); int unused_property_fields = new_map->unused_property_fields() - 1; if (unused_property_fields < 0) { unused_property_fields += JSObject::kFieldsAdded; @@ -2513,11 +2331,15 @@ Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map, } if (FLAG_trace_generalization) { + HeapType* field_type = (details.type() == FIELD) + ? map->instance_descriptors()->GetFieldType(modify_index) + : NULL; map->PrintGeneralization(stdout, reason, modify_index, new_map->NumberOfOwnDescriptors(), new_map->NumberOfOwnDescriptors(), details.type() == CONSTANT && store_mode == FORCE_FIELD, - Representation::Tagged(), Representation::Tagged()); + details.representation(), Representation::Tagged(), + field_type, HeapType::Any()); } return new_map; } @@ -2578,44 +2400,11 @@ Map* Map::FindRootMap() { } -// Returns NULL if the updated map is incompatible. -Map* Map::FindUpdatedMap(int verbatim, - int length, - DescriptorArray* descriptors) { - // This can only be called on roots of transition trees. 
- ASSERT(GetBackPointer()->IsUndefined()); - - Map* current = this; - - for (int i = verbatim; i < length; i++) { - if (!current->HasTransitionArray()) break; - Name* name = descriptors->GetKey(i); - TransitionArray* transitions = current->transitions(); - int transition = transitions->Search(name); - if (transition == TransitionArray::kNotFound) break; - current = transitions->GetTarget(transition); - PropertyDetails details = descriptors->GetDetails(i); - PropertyDetails target_details = - current->instance_descriptors()->GetDetails(i); - if (details.attributes() != target_details.attributes()) return NULL; - if (details.type() == CALLBACKS) { - if (target_details.type() != CALLBACKS) return NULL; - if (descriptors->GetValue(i) != - current->instance_descriptors()->GetValue(i)) { - return NULL; - } - } else if (target_details.type() == CALLBACKS) { - return NULL; - } - } - - return current; -} - - Map* Map::FindLastMatchMap(int verbatim, int length, DescriptorArray* descriptors) { + DisallowHeapAllocation no_allocation; + // This can only be called on roots of transition trees. ASSERT(GetBackPointer()->IsUndefined()); @@ -2631,13 +2420,17 @@ Map* Map::FindLastMatchMap(int verbatim, Map* next = transitions->GetTarget(transition); DescriptorArray* next_descriptors = next->instance_descriptors(); - if (next_descriptors->GetValue(i) != descriptors->GetValue(i)) break; - PropertyDetails details = descriptors->GetDetails(i); PropertyDetails next_details = next_descriptors->GetDetails(i); if (details.type() != next_details.type()) break; if (details.attributes() != next_details.attributes()) break; if (!details.representation().Equals(next_details.representation())) break; + if (next_details.type() == FIELD) { + if (!descriptors->GetFieldType(i)->NowIs( + next_descriptors->GetFieldType(i))) break; + } else { + if (descriptors->GetValue(i) != next_descriptors->GetValue(i)) break; + } current = next; } @@ -2645,6 +2438,100 @@ Map* Map::FindLastMatchMap(int verbatim, } +Map* Map::FindFieldOwner(int descriptor) { + DisallowHeapAllocation no_allocation; + ASSERT_EQ(FIELD, instance_descriptors()->GetDetails(descriptor).type()); + Map* result = this; + while (true) { + Object* back = result->GetBackPointer(); + if (back->IsUndefined()) break; + Map* parent = Map::cast(back); + if (parent->NumberOfOwnDescriptors() <= descriptor) break; + result = parent; + } + return result; +} + + +void Map::UpdateDescriptor(int descriptor_number, Descriptor* desc) { + DisallowHeapAllocation no_allocation; + if (HasTransitionArray()) { + TransitionArray* transitions = this->transitions(); + for (int i = 0; i < transitions->number_of_transitions(); ++i) { + transitions->GetTarget(i)->UpdateDescriptor(descriptor_number, desc); + } + } + instance_descriptors()->Replace(descriptor_number, desc);; +} + + +// static +Handle<HeapType> Map::GeneralizeFieldType(Handle<HeapType> type1, + Handle<HeapType> type2, + Isolate* isolate) { + static const int kMaxClassesPerFieldType = 5; + if (type1->NowIs(type2)) return type2; + if (type2->NowIs(type1)) return type1; + if (type1->NowStable() && type2->NowStable()) { + Handle<HeapType> type = HeapType::Union(type1, type2, isolate); + if (type->NumClasses() <= kMaxClassesPerFieldType) { + ASSERT(type->NowStable()); + ASSERT(type1->NowIs(type)); + ASSERT(type2->NowIs(type)); + return type; + } + } + return HeapType::Any(isolate); +} + + +// static +void Map::GeneralizeFieldType(Handle<Map> map, + int modify_index, + Handle<HeapType> new_field_type) { + Isolate* isolate = 
map->GetIsolate(); + + // Check if we actually need to generalize the field type at all. + Handle<HeapType> old_field_type( + map->instance_descriptors()->GetFieldType(modify_index), isolate); + if (new_field_type->NowIs(old_field_type)) { + ASSERT(Map::GeneralizeFieldType(old_field_type, + new_field_type, + isolate)->NowIs(old_field_type)); + return; + } + + // Determine the field owner. + Handle<Map> field_owner(map->FindFieldOwner(modify_index), isolate); + Handle<DescriptorArray> descriptors( + field_owner->instance_descriptors(), isolate); + ASSERT_EQ(*old_field_type, descriptors->GetFieldType(modify_index)); + + // Determine the generalized new field type. + new_field_type = Map::GeneralizeFieldType( + old_field_type, new_field_type, isolate); + + PropertyDetails details = descriptors->GetDetails(modify_index); + FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate), + descriptors->GetFieldIndex(modify_index), + new_field_type, + details.attributes(), + details.representation()); + field_owner->UpdateDescriptor(modify_index, &d); + field_owner->dependent_code()->DeoptimizeDependentCodeGroup( + isolate, DependentCode::kFieldTypeGroup); + + if (FLAG_trace_generalization) { + map->PrintGeneralization( + stdout, "field type generalization", + modify_index, map->NumberOfOwnDescriptors(), + map->NumberOfOwnDescriptors(), false, + details.representation(), details.representation(), + *old_field_type, *new_field_type); + } +} + + // Generalize the representation of the descriptor at |modify_index|. // This method rewrites the transition tree to reflect the new change. To avoid // high degrees over polymorphism, and to stabilize quickly, on every rewrite @@ -2652,22 +2539,28 @@ Map* Map::FindLastMatchMap(int verbatim, // (partial) version of the type in the transition tree. // To do this, on each rewrite: // - Search the root of the transition tree using FindRootMap. -// - Find |updated|, the newest matching version of this map using -// FindUpdatedMap. This uses the keys in the own map's descriptor array to -// walk the transition tree. -// - Merge/generalize the descriptor array of the current map and |updated|. -// - Generalize the |modify_index| descriptor using |new_representation|. -// - Walk the tree again starting from the root towards |updated|. Stop at +// - Find |target_map|, the newest matching version of this map using the keys +// in the |old_map|'s descriptor array to walk the transition tree. +// - Merge/generalize the descriptor array of the |old_map| and |target_map|. +// - Generalize the |modify_index| descriptor using |new_representation| and +// |new_field_type|. +// - Walk the tree again starting from the root towards |target_map|. Stop at // |split_map|, the first map who's descriptor array does not match the merged // descriptor array. -// - If |updated| == |split_map|, |updated| is in the expected state. Return it. -// - Otherwise, invalidate the outdated transition target from |updated|, and +// - If |target_map| == |split_map|, |target_map| is in the expected state. +// Return it. +// - Otherwise, invalidate the outdated transition target from |target_map|, and // replace its transition tree with a new branch for the updated descriptors. 
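Before the walk described above runs, field types are widened through Map::GeneralizeFieldType, which behaves like a small join on the HeapType lattice: a type already subsumed by the other is returned as is, unions of stable class types are kept while they stay at or below kMaxClassesPerFieldType (5) classes, and everything else collapses to Any. A worked illustration (Class(A), Class(B), Union(...) are descriptive stand-ins, not code from this patch; the isolate argument is omitted):

//   GeneralizeFieldType(Class(A), Class(A))      -> Class(A)            (NowIs fast path)
//   GeneralizeFieldType(Class(A), Class(B))      -> Union(A, B)         (2 classes <= 5)
//   GeneralizeFieldType(Union(A..E), Class(F))   -> Any                 (6 classes > 5)
//   GeneralizeFieldType(non-stable T, Class(B))  -> Any                 (not NowStable)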
Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, int modify_index, Representation new_representation, + Handle<HeapType> new_field_type, StoreMode store_mode) { - Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors()); + Isolate* isolate = old_map->GetIsolate(); + + Handle<DescriptorArray> old_descriptors( + old_map->instance_descriptors(), isolate); + int old_nof = old_map->NumberOfOwnDescriptors(); PropertyDetails old_details = old_descriptors->GetDetails(modify_index); Representation old_representation = old_details.representation(); @@ -2678,89 +2571,279 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, if (old_representation.IsNone() && !new_representation.IsNone() && !new_representation.IsDouble()) { + ASSERT(old_details.type() == FIELD); + ASSERT(old_descriptors->GetFieldType(modify_index)->NowIs( + HeapType::None())); + if (FLAG_trace_generalization) { + old_map->PrintGeneralization( + stdout, "uninitialized field", + modify_index, old_map->NumberOfOwnDescriptors(), + old_map->NumberOfOwnDescriptors(), false, + old_representation, new_representation, + old_descriptors->GetFieldType(modify_index), *new_field_type); + } old_descriptors->SetRepresentation(modify_index, new_representation); + old_descriptors->SetValue(modify_index, *new_field_type); return old_map; } - int descriptors = old_map->NumberOfOwnDescriptors(); - Handle<Map> root_map(old_map->FindRootMap()); - // Check the state of the root map. + Handle<Map> root_map(old_map->FindRootMap(), isolate); if (!old_map->EquivalentToForTransition(*root_map)) { return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode, old_details.attributes(), "not equivalent"); } + int root_nof = root_map->NumberOfOwnDescriptors(); + if (modify_index < root_nof) { + PropertyDetails old_details = old_descriptors->GetDetails(modify_index); + if ((old_details.type() != FIELD && store_mode == FORCE_FIELD) || + (old_details.type() == FIELD && + (!new_field_type->NowIs(old_descriptors->GetFieldType(modify_index)) || + !new_representation.fits_into(old_details.representation())))) { + return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode, + old_details.attributes(), "root modification"); + } + } - int verbatim = root_map->NumberOfOwnDescriptors(); + Handle<Map> target_map = root_map; + for (int i = root_nof; i < old_nof; ++i) { + int j = target_map->SearchTransition(old_descriptors->GetKey(i)); + if (j == TransitionArray::kNotFound) break; + Handle<Map> tmp_map(target_map->GetTransition(j), isolate); + Handle<DescriptorArray> tmp_descriptors = handle( + tmp_map->instance_descriptors(), isolate); - if (store_mode != ALLOW_AS_CONSTANT && modify_index < verbatim) { - return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode, - old_details.attributes(), "root modification"); + // Check if target map is incompatible. 
+ PropertyDetails old_details = old_descriptors->GetDetails(i); + PropertyDetails tmp_details = tmp_descriptors->GetDetails(i); + PropertyType old_type = old_details.type(); + PropertyType tmp_type = tmp_details.type(); + if (tmp_details.attributes() != old_details.attributes() || + ((tmp_type == CALLBACKS || old_type == CALLBACKS) && + (tmp_type != old_type || + tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) { + return CopyGeneralizeAllRepresentations( + old_map, modify_index, store_mode, + old_details.attributes(), "incompatible"); + } + Representation old_representation = old_details.representation(); + Representation tmp_representation = tmp_details.representation(); + if (!old_representation.fits_into(tmp_representation) || + (!new_representation.fits_into(tmp_representation) && + modify_index == i)) { + break; + } + if (tmp_type == FIELD) { + // Generalize the field type as necessary. + Handle<HeapType> old_field_type = (old_type == FIELD) + ? handle(old_descriptors->GetFieldType(i), isolate) + : old_descriptors->GetValue(i)->OptimalType( + isolate, tmp_representation); + if (modify_index == i) { + old_field_type = GeneralizeFieldType( + new_field_type, old_field_type, isolate); + } + GeneralizeFieldType(tmp_map, i, old_field_type); + } else if (tmp_type == CONSTANT) { + if (old_type != CONSTANT || + old_descriptors->GetConstant(i) != tmp_descriptors->GetConstant(i)) { + break; + } + } else { + ASSERT_EQ(tmp_type, old_type); + ASSERT_EQ(tmp_descriptors->GetValue(i), old_descriptors->GetValue(i)); + } + target_map = tmp_map; + } + + // Directly change the map if the target map is more general. + Handle<DescriptorArray> target_descriptors( + target_map->instance_descriptors(), isolate); + int target_nof = target_map->NumberOfOwnDescriptors(); + if (target_nof == old_nof && + (store_mode != FORCE_FIELD || + target_descriptors->GetDetails(modify_index).type() == FIELD)) { + ASSERT(modify_index < target_nof); + ASSERT(new_representation.fits_into( + target_descriptors->GetDetails(modify_index).representation())); + ASSERT(target_descriptors->GetDetails(modify_index).type() != FIELD || + new_field_type->NowIs( + target_descriptors->GetFieldType(modify_index))); + return target_map; + } + + // Find the last compatible target map in the transition tree. + for (int i = target_nof; i < old_nof; ++i) { + int j = target_map->SearchTransition(old_descriptors->GetKey(i)); + if (j == TransitionArray::kNotFound) break; + Handle<Map> tmp_map(target_map->GetTransition(j), isolate); + Handle<DescriptorArray> tmp_descriptors( + tmp_map->instance_descriptors(), isolate); + + // Check if target map is compatible. 
+ PropertyDetails old_details = old_descriptors->GetDetails(i); + PropertyDetails tmp_details = tmp_descriptors->GetDetails(i); + if (tmp_details.attributes() != old_details.attributes() || + ((tmp_details.type() == CALLBACKS || old_details.type() == CALLBACKS) && + (tmp_details.type() != old_details.type() || + tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) { + return CopyGeneralizeAllRepresentations( + old_map, modify_index, store_mode, + old_details.attributes(), "incompatible"); + } + target_map = tmp_map; } + target_nof = target_map->NumberOfOwnDescriptors(); + target_descriptors = handle(target_map->instance_descriptors(), isolate); - Map* raw_updated = root_map->FindUpdatedMap( - verbatim, descriptors, *old_descriptors); - if (raw_updated == NULL) { - return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode, - old_details.attributes(), "incompatible"); + // Allocate a new descriptor array large enough to hold the required + // descriptors, with minimally the exact same size as the old descriptor + // array. + int new_slack = Max( + old_nof, old_descriptors->number_of_descriptors()) - old_nof; + Handle<DescriptorArray> new_descriptors = DescriptorArray::Allocate( + isolate, old_nof, new_slack); + ASSERT(new_descriptors->length() > target_descriptors->length() || + new_descriptors->NumberOfSlackDescriptors() > 0 || + new_descriptors->number_of_descriptors() == + old_descriptors->number_of_descriptors()); + ASSERT(new_descriptors->number_of_descriptors() == old_nof); + + // 0 -> |root_nof| + int current_offset = 0; + for (int i = 0; i < root_nof; ++i) { + PropertyDetails old_details = old_descriptors->GetDetails(i); + if (old_details.type() == FIELD) current_offset++; + Descriptor d(handle(old_descriptors->GetKey(i), isolate), + handle(old_descriptors->GetValue(i), isolate), + old_details); + new_descriptors->Set(i, &d); } - Handle<Map> updated(raw_updated); - Handle<DescriptorArray> updated_descriptors(updated->instance_descriptors()); - - int valid = updated->NumberOfOwnDescriptors(); + // |root_nof| -> |target_nof| + for (int i = root_nof; i < target_nof; ++i) { + Handle<Name> target_key(target_descriptors->GetKey(i), isolate); + PropertyDetails old_details = old_descriptors->GetDetails(i); + PropertyDetails target_details = target_descriptors->GetDetails(i); + target_details = target_details.CopyWithRepresentation( + old_details.representation().generalize( + target_details.representation())); + if (modify_index == i) { + target_details = target_details.CopyWithRepresentation( + new_representation.generalize(target_details.representation())); + } + if (old_details.type() == FIELD || + target_details.type() == FIELD || + (modify_index == i && store_mode == FORCE_FIELD) || + (target_descriptors->GetValue(i) != old_descriptors->GetValue(i))) { + Handle<HeapType> old_field_type = (old_details.type() == FIELD) + ? handle(old_descriptors->GetFieldType(i), isolate) + : old_descriptors->GetValue(i)->OptimalType( + isolate, target_details.representation()); + Handle<HeapType> target_field_type = (target_details.type() == FIELD) + ? 
handle(target_descriptors->GetFieldType(i), isolate) + : target_descriptors->GetValue(i)->OptimalType( + isolate, target_details.representation()); + target_field_type = GeneralizeFieldType( + target_field_type, old_field_type, isolate); + if (modify_index == i) { + target_field_type = GeneralizeFieldType( + target_field_type, new_field_type, isolate); + } + FieldDescriptor d(target_key, + current_offset++, + target_field_type, + target_details.attributes(), + target_details.representation()); + new_descriptors->Set(i, &d); + } else { + ASSERT_NE(FIELD, target_details.type()); + Descriptor d(target_key, + handle(target_descriptors->GetValue(i), isolate), + target_details); + new_descriptors->Set(i, &d); + } + } - // Directly change the map if the target map is more general. Ensure that the - // target type of the modify_index is a FIELD, unless we are migrating. - if (updated_descriptors->IsMoreGeneralThan( - verbatim, valid, descriptors, *old_descriptors) && - (store_mode == ALLOW_AS_CONSTANT || - updated_descriptors->GetDetails(modify_index).type() == FIELD)) { - Representation updated_representation = - updated_descriptors->GetDetails(modify_index).representation(); - if (new_representation.fits_into(updated_representation)) return updated; + // |target_nof| -> |old_nof| + for (int i = target_nof; i < old_nof; ++i) { + PropertyDetails old_details = old_descriptors->GetDetails(i); + Handle<Name> old_key(old_descriptors->GetKey(i), isolate); + if (modify_index == i) { + old_details = old_details.CopyWithRepresentation( + new_representation.generalize(old_details.representation())); + } + if (old_details.type() == FIELD) { + Handle<HeapType> old_field_type( + old_descriptors->GetFieldType(i), isolate); + if (modify_index == i) { + old_field_type = GeneralizeFieldType( + old_field_type, new_field_type, isolate); + } + FieldDescriptor d(old_key, + current_offset++, + old_field_type, + old_details.attributes(), + old_details.representation()); + new_descriptors->Set(i, &d); + } else { + ASSERT(old_details.type() == CONSTANT || old_details.type() == CALLBACKS); + if (modify_index == i && store_mode == FORCE_FIELD) { + FieldDescriptor d(old_key, + current_offset++, + GeneralizeFieldType( + old_descriptors->GetValue(i)->OptimalType( + isolate, old_details.representation()), + new_field_type, isolate), + old_details.attributes(), + old_details.representation()); + new_descriptors->Set(i, &d); + } else { + ASSERT_NE(FIELD, old_details.type()); + Descriptor d(old_key, + handle(old_descriptors->GetValue(i), isolate), + old_details); + new_descriptors->Set(i, &d); + } + } } - Handle<DescriptorArray> new_descriptors = DescriptorArray::Merge( - updated_descriptors, verbatim, valid, descriptors, modify_index, - store_mode, old_descriptors); - ASSERT(store_mode == ALLOW_AS_CONSTANT || - new_descriptors->GetDetails(modify_index).type() == FIELD); + new_descriptors->Sort(); - old_representation = - new_descriptors->GetDetails(modify_index).representation(); - Representation updated_representation = - new_representation.generalize(old_representation); - if (!updated_representation.Equals(old_representation)) { - new_descriptors->SetRepresentation(modify_index, updated_representation); - } + ASSERT(store_mode != FORCE_FIELD || + new_descriptors->GetDetails(modify_index).type() == FIELD); Handle<Map> split_map(root_map->FindLastMatchMap( - verbatim, descriptors, *new_descriptors)); + root_nof, old_nof, *new_descriptors), isolate); + int split_nof = split_map->NumberOfOwnDescriptors(); + ASSERT_NE(old_nof, 
split_nof); - int split_descriptors = split_map->NumberOfOwnDescriptors(); - // This is shadowed by |updated_descriptors| being more general than - // |old_descriptors|. - ASSERT(descriptors != split_descriptors); - - int descriptor = split_descriptors; split_map->DeprecateTarget( - old_descriptors->GetKey(descriptor), *new_descriptors); + old_descriptors->GetKey(split_nof), *new_descriptors); if (FLAG_trace_generalization) { + PropertyDetails old_details = old_descriptors->GetDetails(modify_index); + PropertyDetails new_details = new_descriptors->GetDetails(modify_index); + Handle<HeapType> old_field_type = (old_details.type() == FIELD) + ? handle(old_descriptors->GetFieldType(modify_index), isolate) + : HeapType::Constant(handle(old_descriptors->GetValue(modify_index), + isolate), isolate); + Handle<HeapType> new_field_type = (new_details.type() == FIELD) + ? handle(new_descriptors->GetFieldType(modify_index), isolate) + : HeapType::Constant(handle(new_descriptors->GetValue(modify_index), + isolate), isolate); old_map->PrintGeneralization( - stdout, "", modify_index, descriptor, descriptors, - old_descriptors->GetDetails(modify_index).type() == CONSTANT && - store_mode == FORCE_FIELD, - old_representation, updated_representation); + stdout, "", modify_index, split_nof, old_nof, + old_details.type() == CONSTANT && store_mode == FORCE_FIELD, + old_details.representation(), new_details.representation(), + *old_field_type, *new_field_type); } // Add missing transitions. Handle<Map> new_map = split_map; - for (; descriptor < descriptors; descriptor++) { - new_map = Map::CopyInstallDescriptors(new_map, descriptor, new_descriptors); + for (int i = split_nof; i < old_nof; ++i) { + new_map = CopyInstallDescriptors(new_map, i, new_descriptors); } - new_map->set_owns_descriptors(true); return new_map; } @@ -2768,61 +2851,97 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, // Generalize the representation of all FIELD descriptors. 
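Concretely, the tail of GeneralizeRepresentation above rebuilds the transition tree: the old branch hanging off |split_map| is deprecated and a fresh branch carrying the merged descriptors is installed in its place, while existing objects keep their now deprecated maps until they are migrated lazily, for example through Map::CurrentMapForDeprecated below. A rough before/after sketch for a store that forces field x from Smi to Double (map names are illustrative):

//   before:   root --x--> m1 --y--> m2           x: Smi
//   after:    root --x--> m1' --y--> m2'         x: Double (generalized)
//             m1 and m2 are marked deprecated and are no longer reached
//             through forward transitions; objects still carrying them are
//             migrated to the primed branch on their next property access.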
Handle<Map> Map::GeneralizeAllFieldRepresentations( - Handle<Map> map, - Representation new_representation) { + Handle<Map> map) { Handle<DescriptorArray> descriptors(map->instance_descriptors()); - for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) { - PropertyDetails details = descriptors->GetDetails(i); - if (details.type() == FIELD) { - map = GeneralizeRepresentation(map, i, new_representation, FORCE_FIELD); + for (int i = 0; i < map->NumberOfOwnDescriptors(); ++i) { + if (descriptors->GetDetails(i).type() == FIELD) { + map = GeneralizeRepresentation(map, i, Representation::Tagged(), + HeapType::Any(map->GetIsolate()), + FORCE_FIELD); } } return map; } -Handle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) { +// static +MaybeHandle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) { Handle<Map> proto_map(map); while (proto_map->prototype()->IsJSObject()) { Handle<JSObject> holder(JSObject::cast(proto_map->prototype())); - if (holder->map()->is_deprecated()) { - JSObject::TryMigrateInstance(holder); - } proto_map = Handle<Map>(holder->map()); + if (proto_map->is_deprecated() && JSObject::TryMigrateInstance(holder)) { + proto_map = Handle<Map>(holder->map()); + } } return CurrentMapForDeprecatedInternal(map); } -Handle<Map> Map::CurrentMapForDeprecatedInternal(Handle<Map> map) { - if (!map->is_deprecated()) return map; - +// static +MaybeHandle<Map> Map::CurrentMapForDeprecatedInternal(Handle<Map> old_map) { DisallowHeapAllocation no_allocation; - DescriptorArray* old_descriptors = map->instance_descriptors(); + DisallowDeoptimization no_deoptimization(old_map->GetIsolate()); - int descriptors = map->NumberOfOwnDescriptors(); - Map* root_map = map->FindRootMap(); + if (!old_map->is_deprecated()) return old_map; // Check the state of the root map. 
- if (!map->EquivalentToForTransition(root_map)) return Handle<Map>(); - int verbatim = root_map->NumberOfOwnDescriptors(); + Map* root_map = old_map->FindRootMap(); + if (!old_map->EquivalentToForTransition(root_map)) return MaybeHandle<Map>(); + int root_nof = root_map->NumberOfOwnDescriptors(); + + int old_nof = old_map->NumberOfOwnDescriptors(); + DescriptorArray* old_descriptors = old_map->instance_descriptors(); - Map* updated = root_map->FindUpdatedMap( - verbatim, descriptors, old_descriptors); - if (updated == NULL) return Handle<Map>(); + Map* new_map = root_map; + for (int i = root_nof; i < old_nof; ++i) { + int j = new_map->SearchTransition(old_descriptors->GetKey(i)); + if (j == TransitionArray::kNotFound) return MaybeHandle<Map>(); + new_map = new_map->GetTransition(j); + DescriptorArray* new_descriptors = new_map->instance_descriptors(); - DescriptorArray* updated_descriptors = updated->instance_descriptors(); - int valid = updated->NumberOfOwnDescriptors(); - if (!updated_descriptors->IsMoreGeneralThan( - verbatim, valid, descriptors, old_descriptors)) { - return Handle<Map>(); - } + PropertyDetails new_details = new_descriptors->GetDetails(i); + PropertyDetails old_details = old_descriptors->GetDetails(i); + if (old_details.attributes() != new_details.attributes() || + !old_details.representation().fits_into(new_details.representation())) { + return MaybeHandle<Map>(); + } + PropertyType new_type = new_details.type(); + PropertyType old_type = old_details.type(); + Object* new_value = new_descriptors->GetValue(i); + Object* old_value = old_descriptors->GetValue(i); + switch (new_type) { + case FIELD: + if ((old_type == FIELD && + !HeapType::cast(old_value)->NowIs(HeapType::cast(new_value))) || + (old_type == CONSTANT && + !HeapType::cast(new_value)->NowContains(old_value)) || + (old_type == CALLBACKS && + !HeapType::Any()->Is(HeapType::cast(new_value)))) { + return MaybeHandle<Map>(); + } + break; - return handle(updated); + case CONSTANT: + case CALLBACKS: + if (old_type != new_type || old_value != new_value) { + return MaybeHandle<Map>(); + } + break; + + case NORMAL: + case HANDLER: + case INTERCEPTOR: + case NONEXISTENT: + UNREACHABLE(); + } + } + if (new_map->NumberOfOwnDescriptors() != old_nof) return MaybeHandle<Map>(); + return handle(new_map); } -Handle<Object> JSObject::SetPropertyWithInterceptor( +MaybeHandle<Object> JSObject::SetPropertyWithInterceptor( Handle<JSObject> object, Handle<Name> name, Handle<Object> value, @@ -2845,24 +2964,22 @@ Handle<Object> JSObject::SetPropertyWithInterceptor( v8::Handle<v8::Value> result = args.Call(setter, v8::Utils::ToLocal(name_string), v8::Utils::ToLocal(value_unhole)); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); if (!result.IsEmpty()) return value; } - Handle<Object> result = - SetPropertyPostInterceptor(object, name, value, attributes, strict_mode); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); - return result; + return SetPropertyPostInterceptor( + object, name, value, attributes, strict_mode); } -Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object, - Handle<Name> name, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode, - StoreFromKeyed store_mode) { +MaybeHandle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object, + Handle<Name> name, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode, + StoreFromKeyed store_mode) { LookupResult 
result(object->GetIsolate()); - object->LocalLookup(*name, &result, true); + object->LocalLookup(name, &result, true); if (!result.IsFound()) { object->map()->LookupTransition(JSObject::cast(*object), *name, &result); } @@ -2871,34 +2988,18 @@ Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object, } -Handle<Object> JSObject::SetPropertyWithCallback(Handle<JSObject> object, - Handle<Object> structure, - Handle<Name> name, - Handle<Object> value, - Handle<JSObject> holder, - StrictMode strict_mode) { +MaybeHandle<Object> JSObject::SetPropertyWithCallback(Handle<JSObject> object, + Handle<Object> structure, + Handle<Name> name, + Handle<Object> value, + Handle<JSObject> holder, + StrictMode strict_mode) { Isolate* isolate = object->GetIsolate(); // We should never get here to initialize a const with the hole // value since a const declaration would conflict with the setter. ASSERT(!value->IsTheHole()); - - // To accommodate both the old and the new api we switch on the - // data structure used to store the callbacks. Eventually foreign - // callbacks should be phased out. - if (structure->IsForeign()) { - AccessorDescriptor* callback = - reinterpret_cast<AccessorDescriptor*>( - Handle<Foreign>::cast(structure)->foreign_address()); - CALL_AND_RETRY_OR_DIE(isolate, - (callback->setter)( - isolate, *object, *value, callback->data), - break, - return Handle<Object>()); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); - return value; - } - + ASSERT(!structure->IsForeign()); if (structure->IsExecutableAccessorInfo()) { // api style callbacks ExecutableAccessorInfo* data = ExecutableAccessorInfo::cast(*structure); @@ -2908,8 +3009,7 @@ Handle<Object> JSObject::SetPropertyWithCallback(Handle<JSObject> object, isolate->factory()->NewTypeError("incompatible_method_receiver", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } // TODO(rossberg): Support symbols in the API. if (name->IsSymbol()) return value; @@ -2919,12 +3019,11 @@ Handle<Object> JSObject::SetPropertyWithCallback(Handle<JSObject> object, if (call_fun == NULL) return value; Handle<String> key = Handle<String>::cast(name); LOG(isolate, ApiNamedPropertyAccess("store", *object, *name)); - PropertyCallbackArguments args( - isolate, data->data(), *object, JSObject::cast(*holder)); + PropertyCallbackArguments args(isolate, data->data(), *object, *holder); args.Call(call_fun, v8::Utils::ToLocal(key), v8::Utils::ToLocal(value)); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return value; } @@ -2940,8 +3039,7 @@ Handle<Object> JSObject::SetPropertyWithCallback(Handle<JSObject> object, Handle<Object> error = isolate->factory()->NewTypeError("no_setter_in_callback", HandleVector(args, 2)); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } } @@ -2951,17 +3049,16 @@ Handle<Object> JSObject::SetPropertyWithCallback(Handle<JSObject> object, } UNREACHABLE(); - return Handle<Object>(); + return MaybeHandle<Object>(); } -Handle<Object> JSReceiver::SetPropertyWithDefinedSetter( +MaybeHandle<Object> JSReceiver::SetPropertyWithDefinedSetter( Handle<JSReceiver> object, Handle<JSReceiver> setter, Handle<Object> value) { Isolate* isolate = object->GetIsolate(); -#ifdef ENABLE_DEBUGGER_SUPPORT Debug* debug = isolate->debug(); // Handle stepping into a setter if step into is active. 
// TODO(rossberg): should this apply to getters that are function proxies? @@ -2969,19 +3066,17 @@ Handle<Object> JSReceiver::SetPropertyWithDefinedSetter( debug->HandleStepIn( Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false); } -#endif - bool has_pending_exception; Handle<Object> argv[] = { value }; - Execution::Call( - isolate, setter, object, ARRAY_SIZE(argv), argv, &has_pending_exception); - // Check for pending exception and return the result. - if (has_pending_exception) return Handle<Object>(); + RETURN_ON_EXCEPTION( + isolate, + Execution::Call(isolate, setter, object, ARRAY_SIZE(argv), argv), + Object); return value; } -Handle<Object> JSObject::SetElementWithCallbackSetterInPrototypes( +MaybeHandle<Object> JSObject::SetElementWithCallbackSetterInPrototypes( Handle<JSObject> object, uint32_t index, Handle<Object> value, @@ -3022,12 +3117,13 @@ Handle<Object> JSObject::SetElementWithCallbackSetterInPrototypes( } -Handle<Object> JSObject::SetPropertyViaPrototypes(Handle<JSObject> object, - Handle<Name> name, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode, - bool* done) { +MaybeHandle<Object> JSObject::SetPropertyViaPrototypes( + Handle<JSObject> object, + Handle<Name> name, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode, + bool* done) { Isolate* isolate = object->GetIsolate(); *done = false; @@ -3035,7 +3131,7 @@ Handle<Object> JSObject::SetPropertyViaPrototypes(Handle<JSObject> object, // accessor that wants to handle the property, or whether the property is // read-only on the prototype chain. LookupResult result(isolate); - object->LookupRealNamedPropertyInPrototypes(*name, &result); + object->LookupRealNamedPropertyInPrototypes(name, &result); if (result.IsFound()) { switch (result.type()) { case NORMAL: @@ -3051,16 +3147,18 @@ Handle<Object> JSObject::SetPropertyViaPrototypes(Handle<JSObject> object, } case CALLBACKS: { *done = true; - Handle<Object> callback_object(result.GetCallbackObject(), isolate); - return SetPropertyWithCallback(object, callback_object, name, value, - handle(result.holder()), strict_mode); + if (!result.IsReadOnly()) { + Handle<Object> callback_object(result.GetCallbackObject(), isolate); + return SetPropertyWithCallback(object, callback_object, name, value, + handle(result.holder()), strict_mode); + } + break; } case HANDLER: { Handle<JSProxy> proxy(result.proxy()); return JSProxy::SetPropertyViaPrototypesWithHandler( proxy, object, name, value, attributes, strict_mode, done); } - case TRANSITION: case NONEXISTENT: UNREACHABLE(); break; @@ -3073,24 +3171,47 @@ Handle<Object> JSObject::SetPropertyViaPrototypes(Handle<JSObject> object, Handle<Object> args[] = { name, object }; Handle<Object> error = isolate->factory()->NewTypeError( "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } return isolate->factory()->the_hole_value(); } void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) { + // Only supports adding slack to owned descriptors. 
+ ASSERT(map->owns_descriptors()); + Handle<DescriptorArray> descriptors(map->instance_descriptors()); + int old_size = map->NumberOfOwnDescriptors(); if (slack <= descriptors->NumberOfSlackDescriptors()) return; - int number_of_descriptors = descriptors->number_of_descriptors(); - Isolate* isolate = map->GetIsolate(); - Handle<DescriptorArray> new_descriptors = - isolate->factory()->NewDescriptorArray(number_of_descriptors, slack); - DescriptorArray::WhitenessWitness witness(*new_descriptors); - for (int i = 0; i < number_of_descriptors; ++i) { - new_descriptors->CopyFrom(i, *descriptors, i, witness); + Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo( + descriptors, old_size, slack); + + if (old_size == 0) { + map->set_instance_descriptors(*new_descriptors); + return; + } + + // If the source descriptors had an enum cache we copy it. This ensures + // that the maps to which we push the new descriptor array back can rely + // on a cache always being available once it is set. If the map has more + // enumerated descriptors than available in the original cache, the cache + // will be lazily replaced by the extended cache when needed. + if (descriptors->HasEnumCache()) { + new_descriptors->CopyEnumCacheFrom(*descriptors); + } + + // Replace descriptors by new_descriptors in all maps that share it. + map->GetHeap()->incremental_marking()->RecordWrites(*descriptors); + + Map* walk_map; + for (Object* current = map->GetBackPointer(); + !current->IsUndefined(); + current = walk_map->GetBackPointer()) { + walk_map = Map::cast(current); + if (walk_map->instance_descriptors() != *descriptors) break; + walk_map->set_instance_descriptors(*new_descriptors); } map->set_instance_descriptors(*new_descriptors); @@ -3120,8 +3241,8 @@ static int AppendUniqueCallbacks(NeanderArray* callbacks, // back to front so that the last callback with a given name takes // precedence over previously added callbacks with that name. for (int i = nof_callbacks - 1; i >= 0; i--) { - AccessorInfo* entry = AccessorInfo::cast(callbacks->get(i)); - Name* key = Name::cast(entry->name()); + Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i))); + Handle<Name> key(Name::cast(entry->name())); // Check if a descriptor with this name already exists before writing. 
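The rewritten Map::EnsureDescriptorSlack above has to cope with the fact that one descriptor array is shared by a whole branch of maps: after copying the array with extra slack (and carrying over the enum cache so dependent maps keep a valid one), it walks the back-pointer chain and repoints every map that still referenced the old array, so all sharers observe the reallocation together. A small schematic of the sharing that walk repairs (map names are illustrative):

//   m0 <-back- m1 <-back- m2        all three share one DescriptorArray D
//   EnsureDescriptorSlack(m2, n):   D is copied to D' with n slack slots,
//                                   then m1 and m0 (while they still point at
//                                   D) and finally m2 are repointed to D'.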
if (!T::Contains(key, entry, valid_descriptors, array)) { T::Insert(key, entry, valid_descriptors, array); @@ -3134,16 +3255,18 @@ static int AppendUniqueCallbacks(NeanderArray* callbacks, struct DescriptorArrayAppender { typedef DescriptorArray Array; - static bool Contains(Name* key, - AccessorInfo* entry, + static bool Contains(Handle<Name> key, + Handle<AccessorInfo> entry, int valid_descriptors, Handle<DescriptorArray> array) { - return array->Search(key, valid_descriptors) != DescriptorArray::kNotFound; + DisallowHeapAllocation no_gc; + return array->Search(*key, valid_descriptors) != DescriptorArray::kNotFound; } - static void Insert(Name* key, - AccessorInfo* entry, + static void Insert(Handle<Name> key, + Handle<AccessorInfo> entry, int valid_descriptors, Handle<DescriptorArray> array) { + DisallowHeapAllocation no_gc; CallbacksDescriptor desc(key, entry, entry->property_attributes()); array->Append(&desc); } @@ -3152,20 +3275,21 @@ struct DescriptorArrayAppender { struct FixedArrayAppender { typedef FixedArray Array; - static bool Contains(Name* key, - AccessorInfo* entry, + static bool Contains(Handle<Name> key, + Handle<AccessorInfo> entry, int valid_descriptors, Handle<FixedArray> array) { for (int i = 0; i < valid_descriptors; i++) { - if (key == AccessorInfo::cast(array->get(i))->name()) return true; + if (*key == AccessorInfo::cast(array->get(i))->name()) return true; } return false; } - static void Insert(Name* key, - AccessorInfo* entry, + static void Insert(Handle<Name> key, + Handle<AccessorInfo> entry, int valid_descriptors, Handle<FixedArray> array) { - array->set(valid_descriptors, entry); + DisallowHeapAllocation no_gc; + array->set(valid_descriptors, *entry); } }; @@ -3283,26 +3407,24 @@ bool Map::IsMapInArrayPrototypeChain() { } -static MaybeObject* AddMissingElementsTransitions(Map* map, - ElementsKind to_kind) { +static Handle<Map> AddMissingElementsTransitions(Handle<Map> map, + ElementsKind to_kind) { ASSERT(IsTransitionElementsKind(map->elements_kind())); - Map* current_map = map; + Handle<Map> current_map = map; ElementsKind kind = map->elements_kind(); while (kind != to_kind && !IsTerminalElementsKind(kind)) { kind = GetNextTransitionElementsKind(kind); - MaybeObject* maybe_next_map = - current_map->CopyAsElementsKind(kind, INSERT_TRANSITION); - if (!maybe_next_map->To(¤t_map)) return maybe_next_map; + current_map = Map::CopyAsElementsKind( + current_map, kind, INSERT_TRANSITION); } // In case we are exiting the fast elements kind system, just add the map in // the end. 
if (kind != to_kind) { - MaybeObject* maybe_next_map = - current_map->CopyAsElementsKind(to_kind, INSERT_TRANSITION); - if (!maybe_next_map->To(¤t_map)) return maybe_next_map; + current_map = Map::CopyAsElementsKind( + current_map, to_kind, INSERT_TRANSITION); } ASSERT(current_map->elements_kind() == to_kind); @@ -3310,27 +3432,41 @@ static MaybeObject* AddMissingElementsTransitions(Map* map, } -Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object, - ElementsKind to_kind) { - Isolate* isolate = object->GetIsolate(); - CALL_HEAP_FUNCTION(isolate, - object->GetElementsTransitionMap(isolate, to_kind), - Map); +Handle<Map> Map::TransitionElementsTo(Handle<Map> map, + ElementsKind to_kind) { + ElementsKind from_kind = map->elements_kind(); + if (from_kind == to_kind) return map; + + Isolate* isolate = map->GetIsolate(); + Context* native_context = isolate->context()->native_context(); + Object* maybe_array_maps = native_context->js_array_maps(); + if (maybe_array_maps->IsFixedArray()) { + DisallowHeapAllocation no_gc; + FixedArray* array_maps = FixedArray::cast(maybe_array_maps); + if (array_maps->get(from_kind) == *map) { + Object* maybe_transitioned_map = array_maps->get(to_kind); + if (maybe_transitioned_map->IsMap()) { + return handle(Map::cast(maybe_transitioned_map)); + } + } + } + + return TransitionElementsToSlow(map, to_kind); } -MaybeObject* JSObject::GetElementsTransitionMapSlow(ElementsKind to_kind) { - Map* start_map = map(); - ElementsKind from_kind = start_map->elements_kind(); +Handle<Map> Map::TransitionElementsToSlow(Handle<Map> map, + ElementsKind to_kind) { + ElementsKind from_kind = map->elements_kind(); if (from_kind == to_kind) { - return start_map; + return map; } bool allow_store_transition = // Only remember the map transition if there is not an already existing // non-matching element transition. - !start_map->IsUndefined() && !start_map->is_shared() && + !map->IsUndefined() && !map->is_shared() && IsTransitionElementsKind(from_kind); // Only store fast element maps in ascending generality. @@ -3341,24 +3477,16 @@ MaybeObject* JSObject::GetElementsTransitionMapSlow(ElementsKind to_kind) { } if (!allow_store_transition) { - return start_map->CopyAsElementsKind(to_kind, OMIT_TRANSITION); + return Map::CopyAsElementsKind(map, to_kind, OMIT_TRANSITION); } - return start_map->AsElementsKind(to_kind); + return Map::AsElementsKind(map, to_kind); } -// TODO(ishell): Temporary wrapper until handlified. 
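Map::TransitionElementsTo above adds a fast path for the common JSArray case: each native context caches one array map per ElementsKind in js_array_maps(), so transitioning a cached "root" array map is a plain table lookup, and only cache misses take TransitionElementsToSlow / AsElementsKind. A condensed sketch of that probe; CachedTransition is an illustrative wrapper, the calls inside mirror the hunk above:

// Illustrative only: the cache probe TransitionElementsTo performs, returning
// an empty MaybeHandle on a cache miss so the caller can take the slow path.
static MaybeHandle<Map> CachedTransition(Handle<Map> map, ElementsKind to) {
  Context* native_context = map->GetIsolate()->context()->native_context();
  Object* maybe_array_maps = native_context->js_array_maps();
  if (maybe_array_maps->IsFixedArray()) {
    DisallowHeapAllocation no_gc;
    FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
    if (array_maps->get(map->elements_kind()) == *map &&
        array_maps->get(to)->IsMap()) {
      return handle(Map::cast(array_maps->get(to)));
    }
  }
  return MaybeHandle<Map>();
}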
// static Handle<Map> Map::AsElementsKind(Handle<Map> map, ElementsKind kind) { - CALL_HEAP_FUNCTION(map->GetIsolate(), - map->AsElementsKind(kind), - Map); -} - - -MaybeObject* Map::AsElementsKind(ElementsKind kind) { - Map* closest_map = FindClosestElementsTransition(this, kind); + Handle<Map> closest_map(FindClosestElementsTransition(*map, kind)); if (closest_map->elements_kind() == kind) { return closest_map; @@ -3368,7 +3496,15 @@ MaybeObject* Map::AsElementsKind(ElementsKind kind) { } -void JSObject::LocalLookupRealNamedProperty(Name* name, LookupResult* result) { +Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object, + ElementsKind to_kind) { + Handle<Map> map(object->map()); + return Map::TransitionElementsTo(map, to_kind); +} + + +void JSObject::LocalLookupRealNamedProperty(Handle<Name> name, + LookupResult* result) { DisallowHeapAllocation no_gc; if (IsJSGlobalProxy()) { Object* proto = GetPrototype(); @@ -3378,7 +3514,7 @@ void JSObject::LocalLookupRealNamedProperty(Name* name, LookupResult* result) { } if (HasFastProperties()) { - map()->LookupDescriptor(this, name, result); + map()->LookupDescriptor(this, *name, result); // A property or a map transition was found. We return all of these result // types because LocalLookupRealNamedProperty is used when setting // properties where map transitions are handled. @@ -3416,7 +3552,9 @@ void JSObject::LocalLookupRealNamedProperty(Name* name, LookupResult* result) { } -void JSObject::LookupRealNamedProperty(Name* name, LookupResult* result) { +void JSObject::LookupRealNamedProperty(Handle<Name> name, + LookupResult* result) { + DisallowHeapAllocation no_gc; LocalLookupRealNamedProperty(name, result); if (result->IsFound()) return; @@ -3424,8 +3562,9 @@ void JSObject::LookupRealNamedProperty(Name* name, LookupResult* result) { } -void JSObject::LookupRealNamedPropertyInPrototypes(Name* name, +void JSObject::LookupRealNamedPropertyInPrototypes(Handle<Name> name, LookupResult* result) { + DisallowHeapAllocation no_gc; Isolate* isolate = GetIsolate(); Heap* heap = isolate->heap(); for (Object* pt = GetPrototype(); @@ -3443,7 +3582,7 @@ void JSObject::LookupRealNamedPropertyInPrototypes(Name* name, // We only need to deal with CALLBACKS and INTERCEPTORS -Handle<Object> JSObject::SetPropertyWithFailedAccessCheck( +MaybeHandle<Object> JSObject::SetPropertyWithFailedAccessCheck( Handle<JSObject> object, LookupResult* result, Handle<Name> name, @@ -3451,7 +3590,7 @@ Handle<Object> JSObject::SetPropertyWithFailedAccessCheck( bool check_prototype, StrictMode strict_mode) { if (check_prototype && !result->IsProperty()) { - object->LookupRealNamedPropertyInPrototypes(*name, result); + object->LookupRealNamedPropertyInPrototypes(name, result); } if (result->IsProperty()) { @@ -3486,7 +3625,7 @@ Handle<Object> JSObject::SetPropertyWithFailedAccessCheck( // Try lookup real named properties. Note that only property can be // set is callbacks marked as ALL_CAN_WRITE on the prototype chain. 
LookupResult r(object->GetIsolate()); - object->LookupRealNamedProperty(*name, &r); + object->LookupRealNamedProperty(name, &r); if (r.IsProperty()) { return SetPropertyWithFailedAccessCheck(object, &r, @@ -3505,19 +3644,19 @@ Handle<Object> JSObject::SetPropertyWithFailedAccessCheck( } Isolate* isolate = object->GetIsolate(); - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_SET); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return value; } -Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object, - LookupResult* result, - Handle<Name> key, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode, - StoreFromKeyed store_mode) { +MaybeHandle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object, + LookupResult* result, + Handle<Name> key, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode, + StoreFromKeyed store_mode) { if (result->IsHandler()) { return JSProxy::SetPropertyWithHandler(handle(result->proxy()), object, key, value, attributes, strict_mode); @@ -3535,34 +3674,47 @@ bool JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy, Handle<Name> name) { if (name->IsSymbol()) return false; Handle<Object> args[] = { name }; - Handle<Object> result = proxy->CallTrap( - "has", isolate->derived_has_trap(), ARRAY_SIZE(args), args); - if (isolate->has_pending_exception()) return false; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, result, + CallTrap(proxy, + "has", + isolate->derived_has_trap(), + ARRAY_SIZE(args), + args), + false); return result->BooleanValue(); } -Handle<Object> JSProxy::SetPropertyWithHandler(Handle<JSProxy> proxy, - Handle<JSReceiver> receiver, - Handle<Name> name, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode) { +MaybeHandle<Object> JSProxy::SetPropertyWithHandler( + Handle<JSProxy> proxy, + Handle<JSReceiver> receiver, + Handle<Name> name, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode) { Isolate* isolate = proxy->GetIsolate(); // TODO(rossberg): adjust once there is a story for symbols vs proxies. if (name->IsSymbol()) return value; Handle<Object> args[] = { receiver, name, value }; - proxy->CallTrap("set", isolate->derived_set_trap(), ARRAY_SIZE(args), args); - if (isolate->has_pending_exception()) return Handle<Object>(); + RETURN_ON_EXCEPTION( + isolate, + CallTrap(proxy, + "set", + isolate->derived_set_trap(), + ARRAY_SIZE(args), + args), + Object); return value; } -Handle<Object> JSProxy::SetPropertyViaPrototypesWithHandler( +MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler( Handle<JSProxy> proxy, Handle<JSReceiver> receiver, Handle<Name> name, @@ -3581,9 +3733,15 @@ Handle<Object> JSProxy::SetPropertyViaPrototypesWithHandler( *done = true; // except where redefined... Handle<Object> args[] = { name }; - Handle<Object> result = proxy->CallTrap( - "getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args); - if (isolate->has_pending_exception()) return Handle<Object>(); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + CallTrap(proxy, + "getPropertyDescriptor", + Handle<Object>(), + ARRAY_SIZE(args), + args), + Object); if (result->IsUndefined()) { *done = false; @@ -3591,21 +3749,24 @@ Handle<Object> JSProxy::SetPropertyViaPrototypesWithHandler( } // Emulate [[GetProperty]] semantics for proxies. 
- bool has_pending_exception; Handle<Object> argv[] = { result }; - Handle<Object> desc = Execution::Call( - isolate, isolate->to_complete_property_descriptor(), result, - ARRAY_SIZE(argv), argv, &has_pending_exception); - if (has_pending_exception) return Handle<Object>(); + Handle<Object> desc; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, desc, + Execution::Call(isolate, + isolate->to_complete_property_descriptor(), + result, + ARRAY_SIZE(argv), + argv), + Object); // [[GetProperty]] requires to check that all properties are configurable. Handle<String> configurable_name = isolate->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("configurable_")); - Handle<Object> configurable( - v8::internal::GetProperty(isolate, desc, configurable_name)); - ASSERT(!isolate->has_pending_exception()); - ASSERT(configurable->IsTrue() || configurable->IsFalse()); + Handle<Object> configurable = + Object::GetProperty(desc, configurable_name).ToHandleChecked(); + ASSERT(configurable->IsBoolean()); if (configurable->IsFalse()) { Handle<String> trap = isolate->factory()->InternalizeOneByteString( @@ -3613,8 +3774,7 @@ Handle<Object> JSProxy::SetPropertyViaPrototypesWithHandler( Handle<Object> args[] = { handler, trap, name }; Handle<Object> error = isolate->factory()->NewTypeError( "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } ASSERT(configurable->IsTrue()); @@ -3622,33 +3782,29 @@ Handle<Object> JSProxy::SetPropertyViaPrototypesWithHandler( Handle<String> hasWritable_name = isolate->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("hasWritable_")); - Handle<Object> hasWritable( - v8::internal::GetProperty(isolate, desc, hasWritable_name)); - ASSERT(!isolate->has_pending_exception()); - ASSERT(hasWritable->IsTrue() || hasWritable->IsFalse()); + Handle<Object> hasWritable = + Object::GetProperty(desc, hasWritable_name).ToHandleChecked(); + ASSERT(hasWritable->IsBoolean()); if (hasWritable->IsTrue()) { Handle<String> writable_name = isolate->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("writable_")); - Handle<Object> writable( - v8::internal::GetProperty(isolate, desc, writable_name)); - ASSERT(!isolate->has_pending_exception()); - ASSERT(writable->IsTrue() || writable->IsFalse()); + Handle<Object> writable = + Object::GetProperty(desc, writable_name).ToHandleChecked(); + ASSERT(writable->IsBoolean()); *done = writable->IsFalse(); if (!*done) return isolate->factory()->the_hole_value(); if (strict_mode == SLOPPY) return value; Handle<Object> args[] = { name, receiver }; Handle<Object> error = isolate->factory()->NewTypeError( "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } // We have an AccessorDescriptor. Handle<String> set_name = isolate->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("set_")); - Handle<Object> setter(v8::internal::GetProperty(isolate, desc, set_name)); - ASSERT(!isolate->has_pending_exception()); + Handle<Object> setter = Object::GetProperty(desc, set_name).ToHandleChecked(); if (!setter->IsUndefined()) { // TODO(rossberg): nicer would be to cast to some JSCallable here... 
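The replacement of the old "call, then test isolate->has_pending_exception()" pairs with ASSIGN_RETURN_ON_EXCEPTION and RETURN_ON_EXCEPTION, as in the CallTrap and Execution::Call sites above, is a macro-based early-return idiom. A rough standalone approximation over a simple optional-like type (the macro name and types here are illustrative, not V8's):

// Sketch of an "assign or propagate failure" macro, similar in spirit to
// ASSIGN_RETURN_ON_EXCEPTION. Toy types; not V8's macros.
#include <iostream>
#include <optional>
#include <string>

#define ASSIGN_OR_RETURN_EMPTY(dst, expr)           \
  do {                                              \
    auto maybe_result = (expr);                     \
    if (!maybe_result.has_value()) return {};       \
    (dst) = *maybe_result;                          \
  } while (false)

std::optional<int> ParseDigit(char c) {
  if (c < '0' || c > '9') return std::nullopt;      // the "exception" case
  return c - '0';
}

// Each step either succeeds and continues, or propagates the failure without
// any explicit has_pending_exception()-style re-check at the call site.
std::optional<int> SumOfTwoDigits(const std::string& s) {
  if (s.size() < 2) return std::nullopt;
  int a = 0, b = 0;
  ASSIGN_OR_RETURN_EMPTY(a, ParseDigit(s[0]));
  ASSIGN_OR_RETURN_EMPTY(b, ParseDigit(s[1]));
  return a + b;
}

int main() {
  std::cout << SumOfTwoDigits("42").value_or(-1) << "\n";   // 6
  std::cout << SumOfTwoDigits("4x").value_or(-1) << "\n";   // -1 (propagated)
  return 0;
}

Each expansion introduces its own scope, so several fallible steps compose inside one function, which is how the rewritten proxy traps read.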
return SetPropertyWithDefinedSetter( @@ -3659,12 +3815,11 @@ Handle<Object> JSProxy::SetPropertyViaPrototypesWithHandler( Handle<Object> args2[] = { name, proxy }; Handle<Object> error = isolate->factory()->NewTypeError( "no_setter_in_callback", HandleVector(args2, ARRAY_SIZE(args2))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } -Handle<Object> JSProxy::DeletePropertyWithHandler( +MaybeHandle<Object> JSProxy::DeletePropertyWithHandler( Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) { Isolate* isolate = proxy->GetIsolate(); @@ -3672,9 +3827,15 @@ Handle<Object> JSProxy::DeletePropertyWithHandler( if (name->IsSymbol()) return isolate->factory()->false_value(); Handle<Object> args[] = { name }; - Handle<Object> result = proxy->CallTrap( - "delete", Handle<Object>(), ARRAY_SIZE(args), args); - if (isolate->has_pending_exception()) return Handle<Object>(); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + CallTrap(proxy, + "delete", + Handle<Object>(), + ARRAY_SIZE(args), + args), + Object); bool result_bool = result->BooleanValue(); if (mode == STRICT_DELETION && !result_bool) { @@ -3684,14 +3845,13 @@ Handle<Object> JSProxy::DeletePropertyWithHandler( Handle<Object> args[] = { handler, trap_name }; Handle<Object> error = isolate->factory()->NewTypeError( "handler_failed", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } return isolate->factory()->ToBoolean(result_bool); } -Handle<Object> JSProxy::DeleteElementWithHandler( +MaybeHandle<Object> JSProxy::DeleteElementWithHandler( Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) { Isolate* isolate = proxy->GetIsolate(); Handle<String> name = isolate->factory()->Uint32ToString(index); @@ -3710,37 +3870,51 @@ PropertyAttributes JSProxy::GetPropertyAttributeWithHandler( if (name->IsSymbol()) return ABSENT; Handle<Object> args[] = { name }; - Handle<Object> result = proxy->CallTrap( - "getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args); - if (isolate->has_pending_exception()) return NONE; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, result, + proxy->CallTrap(proxy, + "getPropertyDescriptor", + Handle<Object>(), + ARRAY_SIZE(args), + args), + NONE); if (result->IsUndefined()) return ABSENT; - bool has_pending_exception; Handle<Object> argv[] = { result }; - Handle<Object> desc = Execution::Call( - isolate, isolate->to_complete_property_descriptor(), result, - ARRAY_SIZE(argv), argv, &has_pending_exception); - if (has_pending_exception) return NONE; + Handle<Object> desc; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, desc, + Execution::Call(isolate, + isolate->to_complete_property_descriptor(), + result, + ARRAY_SIZE(argv), + argv), + NONE); // Convert result to PropertyAttributes. 
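CallTrap above changes from an instance method into a static function that receives Handle<JSProxy>, the same handlification applied to most methods in this patch. The motivation is that a handle is a slot the garbage collector can update when it moves an object, whereas a raw receiver pointer held across an allocation could be left dangling. A deliberately tiny model of that indirection (not V8's Handle or HandleScope machinery):

// Tiny model of why handles matter once the collector can move objects: a
// handle is a slot the "GC" rewrites on relocation, a raw pointer is not.
#include <cstdio>
#include <vector>

struct Obj { int field; };

struct Heap {
  Obj space_a[4] = {};
  Obj space_b[4] = {};
  bool allocating_in_a = true;
  std::vector<Obj**> handle_slots;   // every live handle registers its slot

  Obj* Allocate(int value) {
    Obj* target = allocating_in_a ? &space_a[0] : &space_b[0];
    *target = Obj{value};            // trivial "allocation" at slot 0
    return target;
  }

  // "Moving GC": copy the live object to the other semispace and fix up all
  // registered handle slots so handles stay valid.
  void MoveEverything() {
    Obj* from = allocating_in_a ? &space_a[0] : &space_b[0];
    Obj* to = allocating_in_a ? &space_b[0] : &space_a[0];
    *to = *from;
    from->field = -1;                // poison the old location
    for (Obj** slot : handle_slots) *slot = to;
    allocating_in_a = !allocating_in_a;
  }
};

struct Handle {
  Handle(Heap* heap, Obj* obj) : location(obj) {
    heap->handle_slots.push_back(&location);
  }
  Obj* operator->() const { return location; }
  Obj* location;
};

int main() {
  Heap heap;
  Obj* raw = heap.Allocate(7);
  Handle handle(&heap, raw);
  heap.MoveEverything();
  std::printf("raw: %d, handle: %d\n", raw->field, handle->field);  // -1 vs 7
  return 0;
}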
Handle<String> enum_n = isolate->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("enumerable_")); - Handle<Object> enumerable(v8::internal::GetProperty(isolate, desc, enum_n)); - if (isolate->has_pending_exception()) return NONE; + Handle<Object> enumerable; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, enumerable, Object::GetProperty(desc, enum_n), NONE); Handle<String> conf_n = isolate->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("configurable_")); - Handle<Object> configurable(v8::internal::GetProperty(isolate, desc, conf_n)); - if (isolate->has_pending_exception()) return NONE; + Handle<Object> configurable; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, configurable, Object::GetProperty(desc, conf_n), NONE); Handle<String> writ_n = isolate->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("writable_")); - Handle<Object> writable(v8::internal::GetProperty(isolate, desc, writ_n)); - if (isolate->has_pending_exception()) return NONE; + Handle<Object> writable; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, writable, Object::GetProperty(desc, writ_n), NONE); if (!writable->BooleanValue()) { Handle<String> set_n = isolate->factory()->InternalizeOneByteString( STATIC_ASCII_VECTOR("set_")); - Handle<Object> setter(v8::internal::GetProperty(isolate, desc, set_n)); - if (isolate->has_pending_exception()) return NONE; + Handle<Object> setter; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, setter, Object::GetProperty(desc, set_n), NONE); writable = isolate->factory()->ToBoolean(!setter->IsUndefined()); } @@ -3795,36 +3969,32 @@ void JSProxy::Fix(Handle<JSProxy> proxy) { } -MUST_USE_RESULT Handle<Object> JSProxy::CallTrap(const char* name, - Handle<Object> derived, - int argc, - Handle<Object> argv[]) { - Isolate* isolate = GetIsolate(); - Handle<Object> handler(this->handler(), isolate); +MaybeHandle<Object> JSProxy::CallTrap(Handle<JSProxy> proxy, + const char* name, + Handle<Object> derived, + int argc, + Handle<Object> argv[]) { + Isolate* isolate = proxy->GetIsolate(); + Handle<Object> handler(proxy->handler(), isolate); Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name); - Handle<Object> trap(v8::internal::GetProperty(isolate, handler, trap_name)); - if (isolate->has_pending_exception()) return trap; + Handle<Object> trap; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, trap, + Object::GetPropertyOrElement(handler, trap_name), + Object); if (trap->IsUndefined()) { if (derived.is_null()) { Handle<Object> args[] = { handler, trap_name }; Handle<Object> error = isolate->factory()->NewTypeError( "handler_trap_missing", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } trap = Handle<Object>(derived); } - bool threw; - return Execution::Call(isolate, trap, handler, argc, argv, &threw); -} - - -// TODO(mstarzinger): Temporary wrapper until handlified. -static Handle<Map> MapAsElementsKind(Handle<Map> map, ElementsKind kind) { - CALL_HEAP_FUNCTION(map->GetIsolate(), map->AsElementsKind(kind), Map); + return Execution::Call(isolate, trap, handler, argc, argv); } @@ -3843,7 +4013,7 @@ void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) { } else { TransitionElementsKind(object, to_kind); } - map = MapAsElementsKind(map, to_kind); + map = Map::AsElementsKind(map, to_kind); } JSObject::MigrateToMap(object, map); } @@ -3855,7 +4025,9 @@ void JSObject::MigrateInstance(Handle<JSObject> object) { // transition that matches the object. 
This achieves what is needed. Handle<Map> original_map(object->map()); GeneralizeFieldRepresentation( - object, 0, Representation::None(), ALLOW_AS_CONSTANT); + object, 0, Representation::None(), + HeapType::None(object->GetIsolate()), + ALLOW_AS_CONSTANT); object->map()->set_migration_target(true); if (FLAG_trace_migration) { object->PrintInstanceMigration(stdout, *original_map, object->map()); @@ -3863,19 +4035,24 @@ void JSObject::MigrateInstance(Handle<JSObject> object) { } -Handle<Object> JSObject::TryMigrateInstance(Handle<JSObject> object) { - Handle<Map> original_map(object->map()); - Handle<Map> new_map = Map::CurrentMapForDeprecatedInternal(original_map); - if (new_map.is_null()) return Handle<Object>(); +// static +bool JSObject::TryMigrateInstance(Handle<JSObject> object) { + Isolate* isolate = object->GetIsolate(); + DisallowDeoptimization no_deoptimization(isolate); + Handle<Map> original_map(object->map(), isolate); + Handle<Map> new_map; + if (!Map::CurrentMapForDeprecatedInternal(original_map).ToHandle(&new_map)) { + return false; + } JSObject::MigrateToMap(object, new_map); if (FLAG_trace_migration) { object->PrintInstanceMigration(stdout, *original_map, object->map()); } - return object; + return true; } -Handle<Object> JSObject::SetPropertyUsingTransition( +MaybeHandle<Object> JSObject::SetPropertyUsingTransition( Handle<JSObject> object, LookupResult* lookup, Handle<Name> name, @@ -3884,7 +4061,7 @@ Handle<Object> JSObject::SetPropertyUsingTransition( Handle<Map> transition_map(lookup->GetTransitionTarget()); int descriptor = transition_map->LastAdded(); - DescriptorArray* descriptors = transition_map->instance_descriptors(); + Handle<DescriptorArray> descriptors(transition_map->instance_descriptors()); PropertyDetails details = descriptors->GetDetails(descriptor); if (details.type() == CALLBACKS || attributes != details.attributes()) { @@ -3901,59 +4078,61 @@ Handle<Object> JSObject::SetPropertyUsingTransition( // Keep the target CONSTANT if the same value is stored. // TODO(verwaest): Also support keeping the placeholder // (value->IsUninitialized) as constant. - if (!value->FitsRepresentation(details.representation()) || - (details.type() == CONSTANT && - descriptors->GetValue(descriptor) != *value)) { - transition_map = Map::GeneralizeRepresentation(transition_map, - descriptor, value->OptimalRepresentation(), FORCE_FIELD); + if (!lookup->CanHoldValue(value)) { + Representation field_representation = value->OptimalRepresentation(); + Handle<HeapType> field_type = value->OptimalType( + lookup->isolate(), field_representation); + transition_map = Map::GeneralizeRepresentation( + transition_map, descriptor, + field_representation, field_type, FORCE_FIELD); } - JSObject::MigrateToMap(object, transition_map); + JSObject::MigrateToNewProperty(object, transition_map, value); + return value; +} - // Reload. 
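The CanHoldValue, OptimalRepresentation and GeneralizeRepresentation calls above, now also threaded through a HeapType, implement per-field representation tracking: a field remembers the narrowest representation that has fit so far and is widened when a store does not fit. The sketch below is a simplified lattice meant only to show the shape of that logic; V8's actual generalization rules are richer than this:

// Toy field-representation lattice: None < Smi/Double/HeapObject < Tagged.
// Storing a value the current representation cannot hold generalizes the field.
#include <cassert>
#include <cmath>

enum class Rep { kNone, kSmi, kDouble, kHeapObject, kTagged };

struct Value {
  bool is_number;
  double number;        // meaningful only when is_number
};

Rep OptimalRepresentation(const Value& v) {
  if (!v.is_number) return Rep::kHeapObject;
  double truncated = std::trunc(v.number);
  bool is_small_int = truncated == v.number &&
                      v.number >= -(1 << 30) && v.number < (1 << 30);
  return is_small_int ? Rep::kSmi : Rep::kDouble;
}

bool CanHold(Rep field, Rep value) {
  if (field == Rep::kTagged) return true;       // most general: anything fits
  if (field == Rep::kNone) return false;        // nothing stored yet
  return field == value;
}

// Widen the field just enough to hold both what it had and the new value.
Rep Generalize(Rep field, Rep value) {
  if (field == Rep::kNone) return value;
  if (field == value) return field;
  return Rep::kTagged;
}

int main() {
  Rep field = Rep::kNone;
  Value small{true, 3}, fractional{true, 2.5}, object{false, 0};

  field = Generalize(field, OptimalRepresentation(small));       // kSmi
  assert(field == Rep::kSmi);
  assert(!CanHold(field, OptimalRepresentation(fractional)));
  field = Generalize(field, OptimalRepresentation(fractional));  // kTagged
  assert(field == Rep::kTagged);
  assert(CanHold(field, OptimalRepresentation(object)));
  return 0;
}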
- descriptors = transition_map->instance_descriptors(); - details = descriptors->GetDetails(descriptor); - if (details.type() != FIELD) return value; +void JSObject::MigrateToNewProperty(Handle<JSObject> object, + Handle<Map> map, + Handle<Object> value) { + JSObject::MigrateToMap(object, map); + if (map->GetLastDescriptorDetails().type() != FIELD) return; + object->WriteToField(map->LastAdded(), *value); +} - int field_index = descriptors->GetFieldIndex(descriptor); + +void JSObject::WriteToField(int descriptor, Object* value) { + DisallowHeapAllocation no_gc; + + DescriptorArray* desc = map()->instance_descriptors(); + PropertyDetails details = desc->GetDetails(descriptor); + + ASSERT(details.type() == FIELD); + + int field_index = desc->GetFieldIndex(descriptor); if (details.representation().IsDouble()) { // Nothing more to be done. - if (value->IsUninitialized()) return value; - HeapNumber* box = HeapNumber::cast(object->RawFastPropertyAt(field_index)); + if (value->IsUninitialized()) return; + HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(field_index)); box->set_value(value->Number()); } else { - object->FastPropertyAtPut(field_index, *value); + FastPropertyAtPut(field_index, value); } - - return value; } static void SetPropertyToField(LookupResult* lookup, - Handle<Name> name, Handle<Object> value) { - Representation representation = lookup->representation(); - if (!value->FitsRepresentation(representation) || - lookup->type() == CONSTANT) { + if (lookup->type() == CONSTANT || !lookup->CanHoldValue(value)) { + Representation field_representation = value->OptimalRepresentation(); + Handle<HeapType> field_type = value->OptimalType( + lookup->isolate(), field_representation); JSObject::GeneralizeFieldRepresentation(handle(lookup->holder()), lookup->GetDescriptorIndex(), - value->OptimalRepresentation(), + field_representation, field_type, FORCE_FIELD); - DescriptorArray* desc = lookup->holder()->map()->instance_descriptors(); - int descriptor = lookup->GetDescriptorIndex(); - representation = desc->GetDetails(descriptor).representation(); - } - - if (representation.IsDouble()) { - HeapNumber* storage = HeapNumber::cast(lookup->holder()->RawFastPropertyAt( - lookup->GetFieldIndex().field_index())); - storage->set_value(value->Number()); - return; } - - lookup->holder()->FastPropertyAtPut( - lookup->GetFieldIndex().field_index(), *value); + lookup->holder()->WriteToField(lookup->GetDescriptorIndex(), *value); } @@ -3974,7 +4153,8 @@ static void ConvertAndSetLocalProperty(LookupResult* lookup, int descriptor_index = lookup->GetDescriptorIndex(); if (lookup->GetAttributes() == attributes) { JSObject::GeneralizeFieldRepresentation( - object, descriptor_index, Representation::Tagged(), FORCE_FIELD); + object, descriptor_index, Representation::Tagged(), + HeapType::Any(lookup->isolate()), FORCE_FIELD); } else { Handle<Map> old_map(object->map()); Handle<Map> new_map = Map::CopyGeneralizeAllRepresentations(old_map, @@ -3982,9 +4162,7 @@ static void ConvertAndSetLocalProperty(LookupResult* lookup, JSObject::MigrateToMap(object, new_map); } - DescriptorArray* descriptors = object->map()->instance_descriptors(); - int index = descriptors->GetDetails(descriptor_index).field_index(); - object->FastPropertyAtPut(index, *value); + object->WriteToField(descriptor_index, *value); } @@ -3994,20 +4172,21 @@ static void SetPropertyToFieldWithAttributes(LookupResult* lookup, PropertyAttributes attributes) { if (lookup->GetAttributes() == attributes) { if (value->IsUninitialized()) return; - 
SetPropertyToField(lookup, name, value); + SetPropertyToField(lookup, value); } else { ConvertAndSetLocalProperty(lookup, name, value, attributes); } } -Handle<Object> JSObject::SetPropertyForResult(Handle<JSObject> object, - LookupResult* lookup, - Handle<Name> name, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode, - StoreFromKeyed store_mode) { +MaybeHandle<Object> JSObject::SetPropertyForResult( + Handle<JSObject> object, + LookupResult* lookup, + Handle<Name> name, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode, + StoreFromKeyed store_mode) { Isolate* isolate = object->GetIsolate(); // Make sure that the top context does not change when doing callbacks or @@ -4024,7 +4203,7 @@ Handle<Object> JSObject::SetPropertyForResult(Handle<JSObject> object, // Check access rights if needed. if (object->IsAccessCheckNeeded()) { - if (!isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) { + if (!isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) { return SetPropertyWithFailedAccessCheck(object, lookup, name, value, true, strict_mode); } @@ -4043,8 +4222,12 @@ Handle<Object> JSObject::SetPropertyForResult(Handle<JSObject> object, if (!lookup->IsProperty() && !object->IsJSContextExtensionObject()) { bool done = false; - Handle<Object> result_object = SetPropertyViaPrototypes( - object, name, value, attributes, strict_mode, &done); + Handle<Object> result_object; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result_object, + SetPropertyViaPrototypes( + object, name, value, attributes, strict_mode, &done), + Object); if (done) return result_object; } @@ -4059,8 +4242,7 @@ Handle<Object> JSObject::SetPropertyForResult(Handle<JSObject> object, Handle<Object> args[] = { name, object }; Handle<Object> error = isolate->factory()->NewTypeError( "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } else { return value; } @@ -4070,54 +4252,55 @@ Handle<Object> JSObject::SetPropertyForResult(Handle<JSObject> object, bool is_observed = object->map()->is_observed() && *name != isolate->heap()->hidden_string(); if (is_observed && lookup->IsDataProperty()) { - old_value = Object::GetProperty(object, name); - CHECK_NOT_EMPTY_HANDLE(isolate, old_value); + old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked(); } // This is a real property that is not read-only, or it is a // transition or null descriptor and there are no setters in the prototypes. - Handle<Object> result = value; - switch (lookup->type()) { - case NORMAL: - SetNormalizedProperty(handle(lookup->holder()), lookup, value); - break; - case FIELD: - SetPropertyToField(lookup, name, value); - break; - case CONSTANT: - // Only replace the constant if necessary. 
- if (*value == lookup->GetConstant()) return value; - SetPropertyToField(lookup, name, value); - break; - case CALLBACKS: { - Handle<Object> callback_object(lookup->GetCallbackObject(), isolate); - return SetPropertyWithCallback(object, callback_object, name, value, - handle(lookup->holder()), strict_mode); + MaybeHandle<Object> maybe_result = value; + if (lookup->IsTransition()) { + maybe_result = SetPropertyUsingTransition(handle(lookup->holder()), lookup, + name, value, attributes); + } else { + switch (lookup->type()) { + case NORMAL: + SetNormalizedProperty(handle(lookup->holder()), lookup, value); + break; + case FIELD: + SetPropertyToField(lookup, value); + break; + case CONSTANT: + // Only replace the constant if necessary. + if (*value == lookup->GetConstant()) return value; + SetPropertyToField(lookup, value); + break; + case CALLBACKS: { + Handle<Object> callback_object(lookup->GetCallbackObject(), isolate); + return SetPropertyWithCallback(object, callback_object, name, value, + handle(lookup->holder()), strict_mode); + } + case INTERCEPTOR: + maybe_result = SetPropertyWithInterceptor( + handle(lookup->holder()), name, value, attributes, strict_mode); + break; + case HANDLER: + case NONEXISTENT: + UNREACHABLE(); } - case INTERCEPTOR: - result = SetPropertyWithInterceptor(handle(lookup->holder()), name, value, - attributes, strict_mode); - break; - case TRANSITION: - result = SetPropertyUsingTransition(handle(lookup->holder()), lookup, - name, value, attributes); - break; - case HANDLER: - case NONEXISTENT: - UNREACHABLE(); } - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>()); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object); if (is_observed) { if (lookup->IsTransition()) { EnqueueChangeRecord(object, "add", name, old_value); } else { LookupResult new_lookup(isolate); - object->LocalLookup(*name, &new_lookup, true); + object->LocalLookup(name, &new_lookup, true); if (new_lookup.IsDataProperty()) { - Handle<Object> new_value = Object::GetProperty(object, name); - CHECK_NOT_EMPTY_HANDLE(isolate, new_value); + Handle<Object> new_value = + Object::GetPropertyOrElement(object, name).ToHandleChecked(); if (!new_value->SameValue(*old_value)) { EnqueueChangeRecord(object, "update", name, old_value); } @@ -4138,14 +4321,15 @@ Handle<Object> JSObject::SetPropertyForResult(Handle<JSObject> object, // Note that this method cannot be used to set the prototype of a function // because ConvertDescriptorToField() which is called in "case CALLBACKS:" // doesn't handle function prototypes correctly. -Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes( +MaybeHandle<Object> JSObject::SetLocalPropertyIgnoreAttributes( Handle<JSObject> object, Handle<Name> name, Handle<Object> value, PropertyAttributes attributes, ValueType value_type, StoreMode mode, - ExtensibilityCheck extensibility_check) { + ExtensibilityCheck extensibility_check, + StoreFromKeyed store_from_keyed) { Isolate* isolate = object->GetIsolate(); // Make sure that the top context does not change when doing callbacks or @@ -4153,14 +4337,14 @@ Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes( AssertNoContextChange ncc(isolate); LookupResult lookup(isolate); - object->LocalLookup(*name, &lookup, true); + object->LocalLookup(name, &lookup, true); if (!lookup.IsFound()) { object->map()->LookupTransition(*object, *name, &lookup); } // Check access rights if needed. 
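The is_observed bookkeeping in SetPropertyForResult, visible above, follows a fixed recipe: snapshot the old value before the write, perform the write, then enqueue an "add" record for a transition or an "update" record only when the value actually changed. A toy version of that recipe, with a plain map standing in for the object and a vector standing in for V8's change-record queue:

// Sketch of the observation pattern around the property writes in this file:
// snapshot, mutate, then record only observable changes. Not Object.observe.
#include <iostream>
#include <map>
#include <string>
#include <vector>

struct ChangeRecord {
  std::string type;     // "add" or "update"
  std::string name;
  int old_value;        // ignored for "add"
};

struct ObservedStore {
  std::map<std::string, int> properties;
  std::vector<ChangeRecord> pending_records;

  void Set(const std::string& name, int value) {
    auto it = properties.find(name);
    bool had_property = it != properties.end();
    int old_value = had_property ? it->second : 0;   // snapshot before the write

    properties[name] = value;                        // the actual mutation

    if (!had_property) {
      pending_records.push_back({"add", name, 0});
    } else if (old_value != value) {                 // SameValue-style check
      pending_records.push_back({"update", name, old_value});
    }                                                // no record if unchanged
  }
};

int main() {
  ObservedStore store;
  store.Set("x", 1);   // add
  store.Set("x", 1);   // no record: value did not change
  store.Set("x", 2);   // update
  for (const ChangeRecord& r : store.pending_records) {
    std::cout << r.type << " " << r.name << "\n";
  }
  return 0;
}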
if (object->IsAccessCheckNeeded()) { - if (!isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) { + if (!isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) { return SetPropertyWithFailedAccessCheck(object, &lookup, name, value, false, SLOPPY); } @@ -4174,9 +4358,9 @@ Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes( name, value, attributes, value_type, mode, extensibility_check); } - if (lookup.IsFound() && - (lookup.type() == INTERCEPTOR || lookup.type() == CALLBACKS)) { - object->LocalLookupRealNamedProperty(*name, &lookup); + if (lookup.IsInterceptor() || + (lookup.IsDescriptorOrDictionary() && lookup.type() == CALLBACKS)) { + object->LocalLookupRealNamedProperty(name, &lookup); } // Check for accessor in prototype chain removed here in clone. @@ -4186,7 +4370,7 @@ Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes( ? OMIT_TRANSITION : INSERT_TRANSITION; // Neither properties nor transitions found. return AddProperty(object, name, value, attributes, SLOPPY, - MAY_BE_STORE_FROM_KEYED, extensibility_check, value_type, mode, flag); + store_from_keyed, extensibility_check, value_type, mode, flag); } Handle<Object> old_value = isolate->factory()->the_hole_value(); @@ -4195,40 +4379,42 @@ Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes( *name != isolate->heap()->hidden_string(); if (is_observed && lookup.IsProperty()) { if (lookup.IsDataProperty()) { - old_value = Object::GetProperty(object, name); - CHECK_NOT_EMPTY_HANDLE(isolate, old_value); + old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked(); } old_attributes = lookup.GetAttributes(); } // Check of IsReadOnly removed from here in clone. - switch (lookup.type()) { - case NORMAL: - ReplaceSlowProperty(object, name, value, attributes); - break; - case FIELD: - SetPropertyToFieldWithAttributes(&lookup, name, value, attributes); - break; - case CONSTANT: - // Only replace the constant if necessary. - if (lookup.GetAttributes() != attributes || - *value != lookup.GetConstant()) { + if (lookup.IsTransition()) { + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + SetPropertyUsingTransition( + handle(lookup.holder()), &lookup, name, value, attributes), + Object); + } else { + switch (lookup.type()) { + case NORMAL: + ReplaceSlowProperty(object, name, value, attributes); + break; + case FIELD: SetPropertyToFieldWithAttributes(&lookup, name, value, attributes); - } - break; - case CALLBACKS: - ConvertAndSetLocalProperty(&lookup, name, value, attributes); - break; - case TRANSITION: { - Handle<Object> result = SetPropertyUsingTransition( - handle(lookup.holder()), &lookup, name, value, attributes); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>()); - break; + break; + case CONSTANT: + // Only replace the constant if necessary. 
+ if (lookup.GetAttributes() != attributes || + *value != lookup.GetConstant()) { + SetPropertyToFieldWithAttributes(&lookup, name, value, attributes); + } + break; + case CALLBACKS: + ConvertAndSetLocalProperty(&lookup, name, value, attributes); + break; + case NONEXISTENT: + case HANDLER: + case INTERCEPTOR: + UNREACHABLE(); } - case NONEXISTENT: - case HANDLER: - case INTERCEPTOR: - UNREACHABLE(); } if (is_observed) { @@ -4238,11 +4424,11 @@ Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes( EnqueueChangeRecord(object, "reconfigure", name, old_value); } else { LookupResult new_lookup(isolate); - object->LocalLookup(*name, &new_lookup, true); + object->LocalLookup(name, &new_lookup, true); bool value_changed = false; if (new_lookup.IsDataProperty()) { - Handle<Object> new_value = Object::GetProperty(object, name); - CHECK_NOT_EMPTY_HANDLE(isolate, new_value); + Handle<Object> new_value = + Object::GetPropertyOrElement(object, name).ToHandleChecked(); value_changed = !old_value->SameValue(*new_value); } if (new_lookup.GetAttributes() != old_attributes) { @@ -4266,7 +4452,7 @@ PropertyAttributes JSObject::GetPropertyAttributePostInterceptor( // Check local property, ignore interceptor. Isolate* isolate = object->GetIsolate(); LookupResult result(isolate); - object->LocalLookupRealNamedProperty(*name, &result); + object->LocalLookupRealNamedProperty(name, &result); if (result.IsFound()) return result.GetAttributes(); if (continue_search) { @@ -4335,7 +4521,7 @@ PropertyAttributes JSReceiver::GetPropertyAttributeWithReceiver( } // Named property. LookupResult lookup(object->GetIsolate()); - object->Lookup(*key, &lookup); + object->Lookup(key, &lookup); return GetPropertyAttributeForResult(object, receiver, &lookup, key, true); } @@ -4350,7 +4536,7 @@ PropertyAttributes JSReceiver::GetPropertyAttributeForResult( if (object->IsAccessCheckNeeded()) { Heap* heap = object->GetHeap(); Handle<JSObject> obj = Handle<JSObject>::cast(object); - if (!heap->isolate()->MayNamedAccessWrapper(obj, name, v8::ACCESS_HAS)) { + if (!heap->isolate()->MayNamedAccess(obj, name, v8::ACCESS_HAS)) { return JSObject::GetPropertyAttributeWithFailedAccessCheck( obj, lookup, name, continue_search); } @@ -4372,7 +4558,6 @@ PropertyAttributes JSReceiver::GetPropertyAttributeForResult( Handle<JSObject>::cast(receiver), name, continue_search); - case TRANSITION: case NONEXISTENT: UNREACHABLE(); } @@ -4390,7 +4575,7 @@ PropertyAttributes JSReceiver::GetLocalPropertyAttribute( } // Named property. LookupResult lookup(object->GetIsolate()); - object->LocalLookup(*name, &lookup, true); + object->LocalLookup(name, &lookup, true); return GetPropertyAttributeForResult(object, object, &lookup, name, false); } @@ -4404,8 +4589,9 @@ PropertyAttributes JSObject::GetElementAttributeWithReceiver( // Check access rights if needed. if (object->IsAccessCheckNeeded()) { - if (!isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_HAS)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS); + if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS); + // TODO(yangguo): Issue 3269, check for scheduled exception missing? 
return ABSENT; } } @@ -4473,7 +4659,7 @@ PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor( uint32_t index, bool continue_search) { PropertyAttributes attr = object->GetElementsAccessor()->GetAttributes( - *receiver, *object, index); + receiver, object, index); if (attr != ABSENT) return attr; // Handle [] on String objects. @@ -4495,49 +4681,30 @@ PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor( } -Handle<Map> NormalizedMapCache::Get(Handle<NormalizedMapCache> cache, - Handle<JSObject> obj, - PropertyNormalizationMode mode) { - int index = obj->map()->Hash() % kEntries; - Handle<Object> result = handle(cache->get(index), cache->GetIsolate()); - if (result->IsMap() && - Handle<Map>::cast(result)->EquivalentToForNormalization(obj->map(), - mode)) { -#ifdef VERIFY_HEAP - if (FLAG_verify_heap) { - Handle<Map>::cast(result)->SharedMapVerify(); - } -#endif -#ifdef ENABLE_SLOW_ASSERTS - if (FLAG_enable_slow_asserts) { - // The cached map should match newly created normalized map bit-by-bit, - // except for the code cache, which can contain some ics which can be - // applied to the shared map. - Handle<Map> fresh = Map::CopyNormalized(handle(obj->map()), mode, - SHARED_NORMALIZED_MAP); +Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) { + Handle<FixedArray> array( + isolate->factory()->NewFixedArray(kEntries, TENURED)); + return Handle<NormalizedMapCache>::cast(array); +} - ASSERT(memcmp(fresh->address(), - Handle<Map>::cast(result)->address(), - Map::kCodeCacheOffset) == 0); - STATIC_ASSERT(Map::kDependentCodeOffset == - Map::kCodeCacheOffset + kPointerSize); - int offset = Map::kDependentCodeOffset + kPointerSize; - ASSERT(memcmp(fresh->address() + offset, - Handle<Map>::cast(result)->address() + offset, - Map::kSize - offset) == 0); - } -#endif - return Handle<Map>::cast(result); + +MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map, + PropertyNormalizationMode mode) { + DisallowHeapAllocation no_gc; + Object* value = FixedArray::get(GetIndex(fast_map)); + if (!value->IsMap() || + !Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) { + return MaybeHandle<Map>(); } + return handle(Map::cast(value)); +} - Isolate* isolate = cache->GetIsolate(); - Handle<Map> map = Map::CopyNormalized(handle(obj->map()), mode, - SHARED_NORMALIZED_MAP); - ASSERT(map->is_dictionary_map()); - cache->set(index, *map); - isolate->counters()->normalized_maps()->Increment(); - return map; +void NormalizedMapCache::Set(Handle<Map> fast_map, + Handle<Map> normalized_map) { + DisallowHeapAllocation no_gc; + ASSERT(normalized_map->is_dictionary_map()); + FixedArray::set(GetIndex(fast_map), *normalized_map); } @@ -4570,6 +4737,7 @@ void JSObject::NormalizeProperties(Handle<JSObject> object, Isolate* isolate = object->GetIsolate(); HandleScope scope(isolate); Handle<Map> map(object->map()); + Handle<Map> new_map = Map::Normalize(map, mode); // Allocate new content. int real_size = map->NumberOfOwnDescriptors(); @@ -4580,7 +4748,7 @@ void JSObject::NormalizeProperties(Handle<JSObject> object, property_count += 2; // Make space for two more properties. 
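The rewritten NormalizedMapCache above is a small direct-mapped cache: New allocates a fixed number of entries, Get hashes the fast map to pick a single slot and reports a miss unless that slot holds an equivalent normalized map, and Set simply overwrites the slot. A standalone sketch of that caching shape, with strings standing in for maps:

// Direct-mapped cache sketch: fixed slots indexed by a hash of the key, and a
// collision just evicts the previous entry. Toy types, not V8's map cache.
#include <array>
#include <cstdio>
#include <functional>
#include <optional>
#include <string>

class DirectMappedCache {
 public:
  static constexpr int kEntries = 64;

  std::optional<std::string> Get(const std::string& key) const {
    const Slot& slot = slots_[Index(key)];
    if (!slot.used || slot.key != key) return std::nullopt;  // miss
    return slot.value;
  }

  void Set(const std::string& key, const std::string& value) {
    Slot& slot = slots_[Index(key)];
    slot.used = true;                // may overwrite a colliding entry
    slot.key = key;
    slot.value = value;
  }

 private:
  struct Slot {
    bool used = false;
    std::string key;
    std::string value;
  };

  static size_t Index(const std::string& key) {
    return std::hash<std::string>{}(key) % kEntries;
  }

  std::array<Slot, kEntries> slots_;
};

int main() {
  DirectMappedCache cache;
  if (!cache.Get("fast-map-A")) {
    // Miss: compute the expensive normalized form once, then remember it.
    cache.Set("fast-map-A", "normalized-map-A");
  }
  std::printf("%s\n", cache.Get("fast-map-A")->c_str());  // normalized-map-A
  return 0;
}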
} Handle<NameDictionary> dictionary = - isolate->factory()->NewNameDictionary(property_count); + NameDictionary::New(isolate, property_count); Handle<DescriptorArray> descs(map->instance_descriptors()); for (int i = 0; i < real_size; i++) { @@ -4591,7 +4759,7 @@ void JSObject::NormalizeProperties(Handle<JSObject> object, Handle<Object> value(descs->GetConstant(i), isolate); PropertyDetails d = PropertyDetails( details.attributes(), NORMAL, i + 1); - dictionary = NameDictionaryAdd(dictionary, key, value, d); + dictionary = NameDictionary::Add(dictionary, key, value, d); break; } case FIELD: { @@ -4600,7 +4768,7 @@ void JSObject::NormalizeProperties(Handle<JSObject> object, object->RawFastPropertyAt(descs->GetFieldIndex(i)), isolate); PropertyDetails d = PropertyDetails(details.attributes(), NORMAL, i + 1); - dictionary = NameDictionaryAdd(dictionary, key, value, d); + dictionary = NameDictionary::Add(dictionary, key, value, d); break; } case CALLBACKS: { @@ -4608,14 +4776,13 @@ void JSObject::NormalizeProperties(Handle<JSObject> object, Handle<Object> value(descs->GetCallbacksObject(i), isolate); PropertyDetails d = PropertyDetails( details.attributes(), CALLBACKS, i + 1); - dictionary = NameDictionaryAdd(dictionary, key, value, d); + dictionary = NameDictionary::Add(dictionary, key, value, d); break; } case INTERCEPTOR: break; case HANDLER: case NORMAL: - case TRANSITION: case NONEXISTENT: UNREACHABLE(); break; @@ -4625,11 +4792,6 @@ void JSObject::NormalizeProperties(Handle<JSObject> object, // Copy the next enumeration index from instance descriptor. dictionary->SetNextEnumerationIndex(real_size + 1); - Handle<NormalizedMapCache> cache( - isolate->context()->native_context()->normalized_map_cache()); - Handle<Map> new_map = NormalizedMapCache::Get(cache, object, mode); - ASSERT(new_map->is_dictionary_map()); - // From here on we cannot fail and we shouldn't GC anymore. DisallowHeapAllocation no_allocation; @@ -4644,8 +4806,9 @@ void JSObject::NormalizeProperties(Handle<JSObject> object, -instance_size_delta, Heap::FROM_MUTATOR); - object->set_map(*new_map); - map->NotifyLeafMapLayoutChange(); + // We are storing the new map using release store after creating a filler for + // the left-over space to avoid races with the sweeper thread. + object->synchronized_set_map(*new_map); object->set_properties(*dictionary); @@ -4664,10 +4827,162 @@ void JSObject::TransformToFastProperties(Handle<JSObject> object, int unused_property_fields) { if (object->HasFastProperties()) return; ASSERT(!object->IsGlobalObject()); - CALL_HEAP_FUNCTION_VOID( - object->GetIsolate(), - object->property_dictionary()->TransformPropertiesToFastFor( - *object, unused_property_fields)); + Isolate* isolate = object->GetIsolate(); + Factory* factory = isolate->factory(); + Handle<NameDictionary> dictionary(object->property_dictionary()); + + // Make sure we preserve dictionary representation if there are too many + // descriptors. + int number_of_elements = dictionary->NumberOfElements(); + if (number_of_elements > kMaxNumberOfDescriptors) return; + + if (number_of_elements != dictionary->NextEnumerationIndex()) { + NameDictionary::DoGenerateNewEnumerationIndices(dictionary); + } + + int instance_descriptor_length = 0; + int number_of_fields = 0; + + // Compute the length of the instance descriptor. 
+ int capacity = dictionary->Capacity(); + for (int i = 0; i < capacity; i++) { + Object* k = dictionary->KeyAt(i); + if (dictionary->IsKey(k)) { + Object* value = dictionary->ValueAt(i); + PropertyType type = dictionary->DetailsAt(i).type(); + ASSERT(type != FIELD); + instance_descriptor_length++; + if (type == NORMAL && !value->IsJSFunction()) { + number_of_fields += 1; + } + } + } + + int inobject_props = object->map()->inobject_properties(); + + // Allocate new map. + Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map())); + new_map->set_dictionary_map(false); + + if (instance_descriptor_length == 0) { + DisallowHeapAllocation no_gc; + ASSERT_LE(unused_property_fields, inobject_props); + // Transform the object. + new_map->set_unused_property_fields(inobject_props); + object->set_map(*new_map); + object->set_properties(isolate->heap()->empty_fixed_array()); + // Check that it really works. + ASSERT(object->HasFastProperties()); + return; + } + + // Allocate the instance descriptor. + Handle<DescriptorArray> descriptors = DescriptorArray::Allocate( + isolate, instance_descriptor_length); + + int number_of_allocated_fields = + number_of_fields + unused_property_fields - inobject_props; + if (number_of_allocated_fields < 0) { + // There is enough inobject space for all fields (including unused). + number_of_allocated_fields = 0; + unused_property_fields = inobject_props - number_of_fields; + } + + // Allocate the fixed array for the fields. + Handle<FixedArray> fields = factory->NewFixedArray( + number_of_allocated_fields); + + // Fill in the instance descriptor and the fields. + int current_offset = 0; + for (int i = 0; i < capacity; i++) { + Object* k = dictionary->KeyAt(i); + if (dictionary->IsKey(k)) { + Object* value = dictionary->ValueAt(i); + Handle<Name> key; + if (k->IsSymbol()) { + key = handle(Symbol::cast(k)); + } else { + // Ensure the key is a unique name before writing into the + // instance descriptor. + key = factory->InternalizeString(handle(String::cast(k))); + } + + PropertyDetails details = dictionary->DetailsAt(i); + int enumeration_index = details.dictionary_index(); + PropertyType type = details.type(); + + if (value->IsJSFunction()) { + ConstantDescriptor d(key, + handle(value, isolate), + details.attributes()); + descriptors->Set(enumeration_index - 1, &d); + } else if (type == NORMAL) { + if (current_offset < inobject_props) { + object->InObjectPropertyAtPut(current_offset, + value, + UPDATE_WRITE_BARRIER); + } else { + int offset = current_offset - inobject_props; + fields->set(offset, value); + } + FieldDescriptor d(key, + current_offset++, + details.attributes(), + // TODO(verwaest): value->OptimalRepresentation(); + Representation::Tagged()); + descriptors->Set(enumeration_index - 1, &d); + } else if (type == CALLBACKS) { + CallbacksDescriptor d(key, + handle(value, isolate), + details.attributes()); + descriptors->Set(enumeration_index - 1, &d); + } else { + UNREACHABLE(); + } + } + } + ASSERT(current_offset == number_of_fields); + + descriptors->Sort(); + + DisallowHeapAllocation no_gc; + new_map->InitializeDescriptors(*descriptors); + new_map->set_unused_property_fields(unused_property_fields); + + // Transform the object. + object->set_map(*new_map); + + object->set_properties(*fields); + ASSERT(object->IsJSObject()); + + // Check that it really works. 
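The handlified TransformToFastProperties above walks the dictionary once to size the descriptor array, then walks it again placing each value either into an in-object slot or into the out-of-line fields array, depending on how many in-object slots the map provides. A toy version of that split, ignoring sorting, attributes and representations:

// Sketch of the in-object vs. out-of-line split: the first N values live in
// fixed inline slots, the rest spill into a separate backing array.
#include <cassert>
#include <string>
#include <utility>
#include <vector>

struct FastObject {
  static constexpr int kInObjectSlots = 2;
  int inobject[kInObjectSlots] = {0, 0};
  std::vector<int> overflow_fields;
  std::vector<std::string> descriptor_names;   // property i -> name

  int Get(int field_index) const {
    return field_index < kInObjectSlots
               ? inobject[field_index]
               : overflow_fields[field_index - kInObjectSlots];
  }
};

FastObject TransformToFast(const std::vector<std::pair<std::string, int>>& dict) {
  FastObject object;
  int current_offset = 0;
  for (const auto& entry : dict) {
    object.descriptor_names.push_back(entry.first);
    if (current_offset < FastObject::kInObjectSlots) {
      object.inobject[current_offset] = entry.second;        // inline slot
    } else {
      object.overflow_fields.push_back(entry.second);        // backing store
    }
    ++current_offset;
  }
  return object;
}

int main() {
  FastObject o = TransformToFast({{"a", 1}, {"b", 2}, {"c", 3}});
  assert(o.Get(0) == 1 && o.Get(1) == 2 && o.Get(2) == 3);
  assert(o.overflow_fields.size() == 1);   // only "c" spilled out of line
  return 0;
}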
+ ASSERT(object->HasFastProperties()); +} + + +void JSObject::ResetElements(Handle<JSObject> object) { + if (object->map()->is_observed()) { + // Maintain invariant that observed elements are always in dictionary mode. + Isolate* isolate = object->GetIsolate(); + Factory* factory = isolate->factory(); + Handle<SeededNumberDictionary> dictionary = + SeededNumberDictionary::New(isolate, 0); + if (object->map() == *factory->sloppy_arguments_elements_map()) { + FixedArray::cast(object->elements())->set(1, *dictionary); + } else { + object->set_elements(*dictionary); + } + return; + } + + ElementsKind elements_kind = GetInitialFastElementsKind(); + if (!FLAG_smi_only_arrays) { + elements_kind = FastSmiToObjectElementsKind(elements_kind); + } + Handle<Map> map = JSObject::GetElementsTransitionMap(object, elements_kind); + DisallowHeapAllocation no_gc; + Handle<FixedArrayBase> elements(map->GetInitialElements()); + JSObject::SetMapAndElements(object, map, elements); } @@ -4706,7 +5021,6 @@ Handle<SeededNumberDictionary> JSObject::NormalizeElements( ASSERT(!object->HasExternalArrayElements() && !object->HasFixedTypedArrayElements()); Isolate* isolate = object->GetIsolate(); - Factory* factory = isolate->factory(); // Find the backing store. Handle<FixedArrayBase> array(FixedArrayBase::cast(object->elements())); @@ -4729,7 +5043,7 @@ Handle<SeededNumberDictionary> JSObject::NormalizeElements( int used_elements = 0; object->GetElementsCapacityAndUsage(&old_capacity, &used_elements); Handle<SeededNumberDictionary> dictionary = - factory->NewSeededNumberDictionary(used_elements); + SeededNumberDictionary::New(isolate, used_elements); dictionary = CopyFastElementsToDictionary(array, length, dictionary); @@ -4761,9 +5075,7 @@ Handle<SeededNumberDictionary> JSObject::NormalizeElements( } -Smi* JSReceiver::GenerateIdentityHash() { - Isolate* isolate = GetIsolate(); - +static Smi* GenerateIdentityHash(Isolate* isolate) { int hash_value; int attempts = 0; do { @@ -4779,33 +5091,51 @@ Smi* JSReceiver::GenerateIdentityHash() { void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) { + ASSERT(!object->IsJSGlobalProxy()); Isolate* isolate = object->GetIsolate(); SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash); } +template<typename ProxyType> +static Handle<Object> GetOrCreateIdentityHashHelper(Handle<ProxyType> proxy) { + Isolate* isolate = proxy->GetIsolate(); + + Handle<Object> hash(proxy->hash(), isolate); + if (hash->IsSmi()) return hash; + + hash = handle(GenerateIdentityHash(isolate), isolate); + proxy->set_hash(*hash); + return hash; +} + + Object* JSObject::GetIdentityHash() { - Object* stored_value = GetHiddenProperty(GetHeap()->identity_hash_string()); - return stored_value->IsSmi() ? stored_value : GetHeap()->undefined_value(); + DisallowHeapAllocation no_gc; + Isolate* isolate = GetIsolate(); + if (IsJSGlobalProxy()) { + return JSGlobalProxy::cast(this)->hash(); + } + Object* stored_value = + GetHiddenProperty(isolate->factory()->identity_hash_string()); + return stored_value->IsSmi() + ? 
stored_value + : isolate->heap()->undefined_value(); } Handle<Object> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) { - Handle<Object> hash(object->GetIdentityHash(), object->GetIsolate()); - if (hash->IsSmi()) - return hash; + if (object->IsJSGlobalProxy()) { + return GetOrCreateIdentityHashHelper(Handle<JSGlobalProxy>::cast(object)); + } Isolate* isolate = object->GetIsolate(); - hash = handle(object->GenerateIdentityHash(), isolate); - Handle<Object> result = SetHiddenProperty(object, - isolate->factory()->identity_hash_string(), hash); - - if (result->IsUndefined()) { - // Trying to get hash of detached proxy. - return handle(Smi::FromInt(0), isolate); - } + Handle<Object> hash(object->GetIdentityHash(), isolate); + if (hash->IsSmi()) return hash; + hash = handle(GenerateIdentityHash(isolate), isolate); + SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash); return hash; } @@ -4816,21 +5146,16 @@ Object* JSProxy::GetIdentityHash() { Handle<Object> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) { - Isolate* isolate = proxy->GetIsolate(); - - Handle<Object> hash(proxy->GetIdentityHash(), isolate); - if (hash->IsSmi()) - return hash; - - hash = handle(proxy->GenerateIdentityHash(), isolate); - proxy->set_hash(*hash); - return hash; + return GetOrCreateIdentityHashHelper(proxy); } -Object* JSObject::GetHiddenProperty(Name* key) { +Object* JSObject::GetHiddenProperty(Handle<Name> key) { + DisallowHeapAllocation no_gc; ASSERT(key->IsUniqueName()); if (IsJSGlobalProxy()) { + // JSGlobalProxies store their hash internally. + ASSERT(*key != GetHeap()->identity_hash_string()); // For a proxy, use the prototype as target object. Object* proxy_parent = GetPrototype(); // If the proxy is detached, return undefined. @@ -4843,7 +5168,7 @@ Object* JSObject::GetHiddenProperty(Name* key) { if (inline_value->IsSmi()) { // Handle inline-stored identity hash. - if (key == GetHeap()->identity_hash_string()) { + if (*key == GetHeap()->identity_hash_string()) { return inline_value; } else { return GetHeap()->the_hole_value(); @@ -4865,6 +5190,8 @@ Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object, ASSERT(key->IsUniqueName()); if (object->IsJSGlobalProxy()) { + // JSGlobalProxies store their hash internally. + ASSERT(*key != *isolate->factory()->identity_hash_string()); // For a proxy, use the prototype as target object. Handle<Object> proxy_parent(object->GetPrototype(), isolate); // If the proxy is detached, return undefined. @@ -4953,13 +5280,16 @@ Object* JSObject::GetHiddenPropertiesHashTable() { return GetHeap()->undefined_value(); } } else { - PropertyAttributes attributes; - // You can't install a getter on a property indexed by the hidden string, - // so we can be sure that GetLocalPropertyPostInterceptor returns a real - // object. 
- return GetLocalPropertyPostInterceptor(this, - GetHeap()->hidden_string(), - &attributes)->ToObjectUnchecked(); + Isolate* isolate = GetIsolate(); + LookupResult result(isolate); + LocalLookupRealNamedProperty(isolate->factory()->hidden_string(), &result); + if (result.IsFound()) { + ASSERT(result.IsNormal()); + ASSERT(result.holder() == this); + Object* value = GetNormalizedProperty(&result); + if (!value->IsTheHole()) return value; + } + return GetHeap()->undefined_value(); } } @@ -4973,9 +5303,8 @@ Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable( return Handle<ObjectHashTable>::cast(inline_value); } - Handle<ObjectHashTable> hashtable = isolate->factory()->NewObjectHashTable( - kInitialCapacity, - USE_CUSTOM_MINIMUM_CAPACITY); + Handle<ObjectHashTable> hashtable = ObjectHashTable::New( + isolate, kInitialCapacity, USE_CUSTOM_MINIMUM_CAPACITY); if (inline_value->IsSmi()) { // We were storing the identity hash inline and now allocated an actual @@ -4992,7 +5321,7 @@ Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable( DONT_ENUM, OPTIMAL_REPRESENTATION, ALLOW_AS_CONSTANT, - OMIT_EXTENSIBILITY_CHECK); + OMIT_EXTENSIBILITY_CHECK).Assert(); return hashtable; } @@ -5017,9 +5346,7 @@ Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object, int sorted_index = descriptors->GetSortedKeyIndex(0); if (descriptors->GetKey(sorted_index) == isolate->heap()->hidden_string() && sorted_index < object->map()->NumberOfOwnDescriptors()) { - ASSERT(descriptors->GetType(sorted_index) == FIELD); - object->FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index), - *value); + object->WriteToField(sorted_index, *value); return object; } } @@ -5031,7 +5358,7 @@ Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object, DONT_ENUM, OPTIMAL_REPRESENTATION, ALLOW_AS_CONSTANT, - OMIT_EXTENSIBILITY_CHECK); + OMIT_EXTENSIBILITY_CHECK).Assert(); return object; } @@ -5042,7 +5369,7 @@ Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object, // Check local property, ignore interceptor. Isolate* isolate = object->GetIsolate(); LookupResult result(isolate); - object->LocalLookupRealNamedProperty(*name, &result); + object->LocalLookupRealNamedProperty(name, &result); if (!result.IsFound()) return isolate->factory()->true_value(); // Normalize object if needed. @@ -5052,8 +5379,8 @@ Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object, } -Handle<Object> JSObject::DeletePropertyWithInterceptor(Handle<JSObject> object, - Handle<Name> name) { +MaybeHandle<Object> JSObject::DeletePropertyWithInterceptor( + Handle<JSObject> object, Handle<Name> name) { Isolate* isolate = object->GetIsolate(); // TODO(rossberg): Support symbols in the API. 
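The hidden-property code above relies on a lazily upgraded store: as long as only the identity hash exists it is kept inline as a Smi, and the first real hidden property allocates an ObjectHashTable and migrates the inline hash into it. A self-contained sketch of that upgrade trick, using a variant and a std::map in place of V8's tagged values and ObjectHashTable:

// Inline-value-until-a-table-is-needed sketch. Toy types only.
#include <cassert>
#include <map>
#include <string>
#include <variant>

class HiddenStore {
 public:
  static constexpr const char* kIdentityHashKey = "identity_hash";

  void SetIdentityHash(int hash) {
    if (auto* table = std::get_if<Table>(&storage_)) {
      (*table)[kIdentityHashKey] = hash;
    } else {
      storage_ = hash;                       // cheap inline representation
    }
  }

  void SetHiddenProperty(const std::string& key, int value) {
    if (auto* inline_hash = std::get_if<int>(&storage_)) {
      Table table;                           // upgrade: keep the old hash
      table[kIdentityHashKey] = *inline_hash;
      storage_ = std::move(table);
    } else if (std::holds_alternative<std::monostate>(storage_)) {
      storage_ = Table{};
    }
    std::get<Table>(storage_)[key] = value;
  }

  bool GetHiddenProperty(const std::string& key, int* out) const {
    if (auto* inline_hash = std::get_if<int>(&storage_)) {
      if (key != kIdentityHashKey) return false;
      *out = *inline_hash;
      return true;
    }
    auto* table = std::get_if<Table>(&storage_);
    if (table == nullptr) return false;
    auto it = table->find(key);
    if (it == table->end()) return false;
    *out = it->second;
    return true;
  }

 private:
  using Table = std::map<std::string, int>;
  std::variant<std::monostate, int, Table> storage_;
};

int main() {
  HiddenStore store;
  store.SetIdentityHash(1234);
  int value = 0;
  assert(store.GetHiddenProperty(HiddenStore::kIdentityHashKey, &value) &&
         value == 1234);
  store.SetHiddenProperty("weakmap_slot", 7);   // triggers the upgrade
  assert(store.GetHiddenProperty(HiddenStore::kIdentityHashKey, &value) &&
         value == 1234);                        // hash survived the upgrade
  assert(store.GetHiddenProperty("weakmap_slot", &value) && value == 7);
  return 0;
}

The payoff mirrored here is that an object which has only ever been hashed never pays for a full table.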
@@ -5069,7 +5396,7 @@ Handle<Object> JSObject::DeletePropertyWithInterceptor(Handle<JSObject> object, isolate, interceptor->data(), *object, *object); v8::Handle<v8::Boolean> result = args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name))); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); if (!result.IsEmpty()) { ASSERT(result->IsBoolean()); Handle<Object> result_internal = v8::Utils::OpenHandle(*result); @@ -5080,13 +5407,13 @@ Handle<Object> JSObject::DeletePropertyWithInterceptor(Handle<JSObject> object, } Handle<Object> result = DeletePropertyPostInterceptor(object, name, NORMAL_DELETION); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); return result; } -Handle<Object> JSObject::DeleteElementWithInterceptor(Handle<JSObject> object, - uint32_t index) { +MaybeHandle<Object> JSObject::DeleteElementWithInterceptor( + Handle<JSObject> object, + uint32_t index) { Isolate* isolate = object->GetIsolate(); Factory* factory = isolate->factory(); @@ -5103,7 +5430,7 @@ Handle<Object> JSObject::DeleteElementWithInterceptor(Handle<JSObject> object, PropertyCallbackArguments args( isolate, interceptor->data(), *object, *object); v8::Handle<v8::Boolean> result = args.Call(deleter, index); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); if (!result.IsEmpty()) { ASSERT(result->IsBoolean()); Handle<Object> result_internal = v8::Utils::OpenHandle(*result); @@ -5111,24 +5438,23 @@ Handle<Object> JSObject::DeleteElementWithInterceptor(Handle<JSObject> object, // Rebox CustomArguments::kReturnValueOffset before returning. return handle(*result_internal, isolate); } - Handle<Object> delete_result = object->GetElementsAccessor()->Delete( + MaybeHandle<Object> delete_result = object->GetElementsAccessor()->Delete( object, index, NORMAL_DELETION); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); return delete_result; } -Handle<Object> JSObject::DeleteElement(Handle<JSObject> object, - uint32_t index, - DeleteMode mode) { +MaybeHandle<Object> JSObject::DeleteElement(Handle<JSObject> object, + uint32_t index, + DeleteMode mode) { Isolate* isolate = object->GetIsolate(); Factory* factory = isolate->factory(); // Check access rights if needed. if (object->IsAccessCheckNeeded() && - !isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_DELETE)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_DELETE); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + !isolate->MayIndexedAccess(object, index, v8::ACCESS_DELETE)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return factory->false_value(); } @@ -5158,21 +5484,24 @@ Handle<Object> JSObject::DeleteElement(Handle<JSObject> object, if (object->map()->is_observed()) { should_enqueue_change_record = HasLocalElement(object, index); if (should_enqueue_change_record) { - if (object->GetLocalElementAccessorPair(index) != NULL) { + if (!GetLocalElementAccessorPair(object, index).is_null()) { old_value = Handle<Object>::cast(factory->the_hole_value()); } else { - old_value = Object::GetElementNoExceptionThrown(isolate, object, index); + old_value = Object::GetElement( + isolate, object, index).ToHandleChecked(); } } } // Skip interceptor if forcing deletion. 
- Handle<Object> result; + MaybeHandle<Object> maybe_result; if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) { - result = DeleteElementWithInterceptor(object, index); + maybe_result = DeleteElementWithInterceptor(object, index); } else { - result = object->GetElementsAccessor()->Delete(object, index, mode); + maybe_result = object->GetElementsAccessor()->Delete(object, index, mode); } + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object); if (should_enqueue_change_record && !HasLocalElement(object, index)) { Handle<String> name = factory->Uint32ToString(index); @@ -5183,18 +5512,18 @@ Handle<Object> JSObject::DeleteElement(Handle<JSObject> object, } -Handle<Object> JSObject::DeleteProperty(Handle<JSObject> object, - Handle<Name> name, - DeleteMode mode) { +MaybeHandle<Object> JSObject::DeleteProperty(Handle<JSObject> object, + Handle<Name> name, + DeleteMode mode) { Isolate* isolate = object->GetIsolate(); // ECMA-262, 3rd, 8.6.2.5 ASSERT(name->IsName()); // Check access rights if needed. if (object->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_DELETE)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_DELETE); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + !isolate->MayNamedAccess(object, name, v8::ACCESS_DELETE)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return isolate->factory()->false_value(); } @@ -5212,7 +5541,7 @@ Handle<Object> JSObject::DeleteProperty(Handle<JSObject> object, } LookupResult lookup(isolate); - object->LocalLookup(*name, &lookup, true); + object->LocalLookup(name, &lookup, true); if (!lookup.IsFound()) return isolate->factory()->true_value(); // Ignore attributes if forcing a deletion. if (lookup.IsDontDelete() && mode != FORCE_DELETION) { @@ -5231,8 +5560,7 @@ Handle<Object> JSObject::DeleteProperty(Handle<JSObject> object, bool is_observed = object->map()->is_observed() && *name != isolate->heap()->hidden_string(); if (is_observed && lookup.IsDataProperty()) { - old_value = Object::GetProperty(object, name); - CHECK_NOT_EMPTY_HANDLE(isolate, old_value); + old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked(); } Handle<Object> result; @@ -5242,7 +5570,10 @@ Handle<Object> JSObject::DeleteProperty(Handle<JSObject> object, if (mode == FORCE_DELETION) { result = DeletePropertyPostInterceptor(object, name, mode); } else { - result = DeletePropertyWithInterceptor(object, name); + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + DeletePropertyWithInterceptor(object, name), + Object); } } else { // Normalize object if needed. 
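Both delete paths above end in object->GetElementsAccessor()->Delete(...), the strategy-object seam between generic property code and the many elements kinds (fast, dictionary, sloppy arguments, typed arrays, and so on). A compact sketch of that dispatch style, with only two toy backing stores and a single operation; V8's ElementsAccessor interface is far wider:

// Strategy-object dispatch: generic code only talks to the common interface.
#include <cstdint>
#include <iostream>
#include <map>
#include <vector>

class ElementsAccessor {
 public:
  virtual ~ElementsAccessor() = default;
  virtual bool Delete(uint32_t index) = 0;
  virtual const char* name() const = 0;
};

// Contiguous backing store ("fast elements").
class FastElements : public ElementsAccessor {
 public:
  explicit FastElements(std::vector<int> values) : values_(std::move(values)) {}
  bool Delete(uint32_t index) override {
    if (index >= values_.size()) return false;
    values_[index] = -1;                          // stand-in for "the hole"
    return true;
  }
  const char* name() const override { return "fast"; }

 private:
  std::vector<int> values_;
};

// Sparse index -> value backing store ("dictionary elements").
class DictionaryElements : public ElementsAccessor {
 public:
  bool Delete(uint32_t index) override { return entries_.erase(index) > 0; }
  const char* name() const override { return "dictionary"; }

 private:
  std::map<uint32_t, int> entries_{{0, 10}, {1000000, 20}};
};

void DeleteElement(ElementsAccessor* accessor, uint32_t index) {
  // Generic code never inspects the elements kind directly.
  std::cout << accessor->name() << " delete @" << index << ": "
            << (accessor->Delete(index) ? "deleted" : "absent") << "\n";
}

int main() {
  FastElements fast({1, 2, 3});
  DictionaryElements dictionary;
  DeleteElement(&fast, 1);              // fast delete @1: deleted
  DeleteElement(&dictionary, 1000000);  // dictionary delete @1000000: deleted
  DeleteElement(&dictionary, 5);        // dictionary delete @5: absent
  return 0;
}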
@@ -5259,9 +5590,9 @@ Handle<Object> JSObject::DeleteProperty(Handle<JSObject> object, } -Handle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object, - uint32_t index, - DeleteMode mode) { +MaybeHandle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object, + uint32_t index, + DeleteMode mode) { if (object->IsJSProxy()) { return JSProxy::DeleteElementWithHandler( Handle<JSProxy>::cast(object), index, mode); @@ -5270,9 +5601,9 @@ Handle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object, } -Handle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object, - Handle<Name> name, - DeleteMode mode) { +MaybeHandle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object, + Handle<Name> name, + DeleteMode mode) { if (object->IsJSProxy()) { return JSProxy::DeletePropertyWithHandler( Handle<JSProxy>::cast(object), name, mode); @@ -5417,17 +5748,16 @@ bool JSObject::ReferencesObject(Object* obj) { } -Handle<Object> JSObject::PreventExtensions(Handle<JSObject> object) { +MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) { Isolate* isolate = object->GetIsolate(); if (!object->map()->is_extensible()) return object; if (object->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(object, - isolate->factory()->undefined_value(), - v8::ACCESS_KEYS)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_KEYS); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + !isolate->MayNamedAccess( + object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return isolate->factory()->false_value(); } @@ -5445,8 +5775,7 @@ Handle<Object> JSObject::PreventExtensions(Handle<JSObject> object) { isolate->factory()->NewTypeError( "cant_prevent_ext_external_array_elements", HandleVector(&object, 1)); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } // If there are fast elements we normalize. @@ -5483,8 +5812,11 @@ static void FreezeDictionary(Dictionary* dictionary) { PropertyDetails details = dictionary->DetailsAt(i); int attrs = DONT_DELETE; // READ_ONLY is an invalid attribute for JS setters/getters. - if (details.type() != CALLBACKS || - !dictionary->ValueAt(i)->IsAccessorPair()) { + if (details.type() == CALLBACKS) { + Object* v = dictionary->ValueAt(i); + if (v->IsPropertyCell()) v = PropertyCell::cast(v)->value(); + if (!v->IsAccessorPair()) attrs |= READ_ONLY; + } else { attrs |= READ_ONLY; } details = details.CopyAddAttributes( @@ -5495,7 +5827,7 @@ static void FreezeDictionary(Dictionary* dictionary) { } -Handle<Object> JSObject::Freeze(Handle<JSObject> object) { +MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) { // Freezing sloppy arguments should be handled elsewhere. 
ASSERT(!object->HasSloppyArgumentsElements()); ASSERT(!object->map()->is_observed()); @@ -5504,11 +5836,10 @@ Handle<Object> JSObject::Freeze(Handle<JSObject> object) { Isolate* isolate = object->GetIsolate(); if (object->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(object, - isolate->factory()->undefined_value(), - v8::ACCESS_KEYS)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_KEYS); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + !isolate->MayNamedAccess( + object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return isolate->factory()->false_value(); } @@ -5526,8 +5857,7 @@ Handle<Object> JSObject::Freeze(Handle<JSObject> object) { isolate->factory()->NewTypeError( "cant_prevent_ext_external_array_elements", HandleVector(&object, 1)); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } Handle<SeededNumberDictionary> new_element_dictionary; @@ -5539,8 +5869,7 @@ Handle<Object> JSObject::Freeze(Handle<JSObject> object) { int capacity = 0; int used = 0; object->GetElementsCapacityAndUsage(&capacity, &used); - new_element_dictionary = - isolate->factory()->NewSeededNumberDictionary(used); + new_element_dictionary = SeededNumberDictionary::New(isolate, used); // Move elements to a dictionary; avoid calling NormalizeElements to avoid // unnecessary transitions. @@ -5553,27 +5882,18 @@ Handle<Object> JSObject::Freeze(Handle<JSObject> object) { } } - LookupResult result(isolate); - Handle<Map> old_map(object->map()); - old_map->LookupTransition(*object, isolate->heap()->frozen_symbol(), &result); - if (result.IsTransition()) { - Handle<Map> transition_map(result.GetTransitionTarget()); + Handle<Map> old_map(object->map(), isolate); + int transition_index = old_map->SearchTransition( + isolate->heap()->frozen_symbol()); + if (transition_index != TransitionArray::kNotFound) { + Handle<Map> transition_map(old_map->GetTransition(transition_index)); ASSERT(transition_map->has_dictionary_elements()); ASSERT(transition_map->is_frozen()); ASSERT(!transition_map->is_extensible()); JSObject::MigrateToMap(object, transition_map); } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) { // Create a new descriptor array with fully-frozen properties - int num_descriptors = old_map->NumberOfOwnDescriptors(); - Handle<DescriptorArray> new_descriptors = - DescriptorArray::CopyUpToAddAttributes( - handle(old_map->instance_descriptors()), num_descriptors, FROZEN); - Handle<Map> new_map = Map::CopyReplaceDescriptors( - old_map, new_descriptors, INSERT_TRANSITION, - isolate->factory()->frozen_symbol()); - new_map->freeze(); - new_map->set_is_extensible(false); - new_map->set_elements_kind(DICTIONARY_ELEMENTS); + Handle<Map> new_map = Map::CopyForFreeze(old_map); JSObject::MigrateToMap(object, new_map); } else { // Slow path: need to normalize properties for safety @@ -5609,34 +5929,33 @@ Handle<Object> JSObject::Freeze(Handle<JSObject> object) { void JSObject::SetObserved(Handle<JSObject> object) { + ASSERT(!object->IsJSGlobalProxy()); + ASSERT(!object->IsJSGlobalObject()); Isolate* isolate = object->GetIsolate(); - - if (object->map()->is_observed()) - return; - - LookupResult result(isolate); - object->map()->LookupTransition(*object, - isolate->heap()->observed_symbol(), - &result); - Handle<Map> new_map; - if (result.IsTransition()) { - new_map = 
handle(result.GetTransitionTarget()); + Handle<Map> old_map(object->map(), isolate); + ASSERT(!old_map->is_observed()); + int transition_index = old_map->SearchTransition( + isolate->heap()->observed_symbol()); + if (transition_index != TransitionArray::kNotFound) { + new_map = handle(old_map->GetTransition(transition_index), isolate); ASSERT(new_map->is_observed()); - } else if (object->map()->CanHaveMoreTransitions()) { - new_map = Map::CopyForObserved(handle(object->map())); + } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) { + new_map = Map::CopyForObserved(old_map); } else { - new_map = Map::Copy(handle(object->map())); + new_map = Map::Copy(old_map); new_map->set_is_observed(); } JSObject::MigrateToMap(object, new_map); } -Handle<JSObject> JSObject::Copy(Handle<JSObject> object) { +Handle<Object> JSObject::FastPropertyAt(Handle<JSObject> object, + Representation representation, + int index) { Isolate* isolate = object->GetIsolate(); - CALL_HEAP_FUNCTION(isolate, - isolate->heap()->CopyJSObject(*object), JSObject); + Handle<Object> raw_value(object->RawFastPropertyAt(index), isolate); + return Object::NewStorageFor(isolate, raw_value, representation); } @@ -5649,13 +5968,14 @@ class JSObjectWalkVisitor { copying_(copying), hints_(hints) {} - Handle<JSObject> StructureWalk(Handle<JSObject> object); + MUST_USE_RESULT MaybeHandle<JSObject> StructureWalk(Handle<JSObject> object); protected: - inline Handle<JSObject> VisitElementOrProperty(Handle<JSObject> object, - Handle<JSObject> value) { + MUST_USE_RESULT inline MaybeHandle<JSObject> VisitElementOrProperty( + Handle<JSObject> object, + Handle<JSObject> value) { Handle<AllocationSite> current_site = site_context()->EnterNewScope(); - Handle<JSObject> copy_of_value = StructureWalk(value); + MaybeHandle<JSObject> copy_of_value = StructureWalk(value); site_context()->ExitScope(current_site, value); return copy_of_value; } @@ -5673,7 +5993,7 @@ class JSObjectWalkVisitor { template <class ContextObject> -Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( +MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( Handle<JSObject> object) { Isolate* isolate = this->isolate(); bool copying = this->copying(); @@ -5684,7 +6004,7 @@ Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( if (check.HasOverflowed()) { isolate->StackOverflow(); - return Handle<JSObject>::null(); + return MaybeHandle<JSObject>(); } } @@ -5698,14 +6018,8 @@ Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( if (site_context()->ShouldCreateMemento(object)) { site_to_pass = site_context()->current(); } - CALL_AND_RETRY_OR_DIE(isolate, - isolate->heap()->CopyJSObject(*object, - site_to_pass.is_null() ? 
NULL : *site_to_pass), - { copy = Handle<JSObject>(JSObject::cast(__object__), - isolate); - break; - }, - return Handle<JSObject>()); + copy = isolate->factory()->CopyJSObjectWithAllocationSite( + object, site_to_pass); } else { copy = object; } @@ -5732,11 +6046,13 @@ Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( int index = descriptors->GetFieldIndex(i); Handle<Object> value(object->RawFastPropertyAt(index), isolate); if (value->IsJSObject()) { - value = VisitElementOrProperty(copy, Handle<JSObject>::cast(value)); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, value, Handle<JSObject>()); + ASSIGN_RETURN_ON_EXCEPTION( + isolate, value, + VisitElementOrProperty(copy, Handle<JSObject>::cast(value)), + JSObject); } else { Representation representation = details.representation(); - value = NewStorageFor(isolate, value, representation); + value = Object::NewStorageFor(isolate, value, representation); } if (copying) { copy->FastPropertyAtPut(index, *value); @@ -5755,17 +6071,18 @@ Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( // In particular, don't try to copy the length attribute of // an array. if (attributes != NONE) continue; - Handle<Object> value( - copy->GetProperty(*key_string, &attributes)->ToObjectUnchecked(), - isolate); + Handle<Object> value = + Object::GetProperty(copy, key_string).ToHandleChecked(); if (value->IsJSObject()) { - Handle<JSObject> result = VisitElementOrProperty( - copy, Handle<JSObject>::cast(value)); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>()); + Handle<JSObject> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + VisitElementOrProperty(copy, Handle<JSObject>::cast(value)), + JSObject); if (copying) { // Creating object copy for literals. No strict mode needed. 
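Throughout StructureWalk the patch replaces RETURN_IF_EMPTY_HANDLE_VALUE with ASSIGN_RETURN_ON_EXCEPTION, so every recursive copy either produces a value or propagates the failure to its caller. A stripped-down model of that propagation shape, using a hypothetical Node tree and std::unique_ptr in place of MaybeHandle:

    #include <memory>

    struct Node {
      std::unique_ptr<Node> child;   // stands in for a nested JSObject
    };

    // An empty unique_ptr plays the role of an empty MaybeHandle.
    std::unique_ptr<Node> MaybeCopy(const Node* n, int depth) {
      if (depth > 1000) return nullptr;            // cf. the stack-overflow bailout
      auto copy = std::make_unique<Node>();
      if (n->child) {
        std::unique_ptr<Node> child_copy = MaybeCopy(n->child.get(), depth + 1);
        if (!child_copy) return nullptr;           // propagate the failure upward
        copy->child = std::move(child_copy);
      }
      return copy;
    }
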
- CHECK_NOT_EMPTY_HANDLE(isolate, JSObject::SetProperty( - copy, key_string, result, NONE, SLOPPY)); + JSObject::SetProperty( + copy, key_string, result, NONE, SLOPPY).Assert(); } } } @@ -5793,9 +6110,11 @@ Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( value->IsTheHole() || (IsFastObjectElementsKind(copy->GetElementsKind()))); if (value->IsJSObject()) { - Handle<JSObject> result = VisitElementOrProperty( - copy, Handle<JSObject>::cast(value)); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>()); + Handle<JSObject> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + VisitElementOrProperty(copy, Handle<JSObject>::cast(value)), + JSObject); if (copying) { elements->set(i, *result); } @@ -5813,9 +6132,11 @@ Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( if (element_dictionary->IsKey(k)) { Handle<Object> value(element_dictionary->ValueAt(i), isolate); if (value->IsJSObject()) { - Handle<JSObject> result = VisitElementOrProperty( - copy, Handle<JSObject>::cast(value)); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>()); + Handle<JSObject> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + VisitElementOrProperty(copy, Handle<JSObject>::cast(value)), + JSObject); if (copying) { element_dictionary->ValueAtPut(i, *result); } @@ -5847,27 +6168,65 @@ Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( } -Handle<JSObject> JSObject::DeepWalk( +MaybeHandle<JSObject> JSObject::DeepWalk( Handle<JSObject> object, AllocationSiteCreationContext* site_context) { JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false, kNoHints); - Handle<JSObject> result = v.StructureWalk(object); - ASSERT(result.is_null() || result.is_identical_to(object)); + MaybeHandle<JSObject> result = v.StructureWalk(object); + Handle<JSObject> for_assert; + ASSERT(!result.ToHandle(&for_assert) || for_assert.is_identical_to(object)); return result; } -Handle<JSObject> JSObject::DeepCopy(Handle<JSObject> object, - AllocationSiteUsageContext* site_context, - DeepCopyHints hints) { +MaybeHandle<JSObject> JSObject::DeepCopy( + Handle<JSObject> object, + AllocationSiteUsageContext* site_context, + DeepCopyHints hints) { JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints); - Handle<JSObject> copy = v.StructureWalk(object); - ASSERT(!copy.is_identical_to(object)); + MaybeHandle<JSObject> copy = v.StructureWalk(object); + Handle<JSObject> for_assert; + ASSERT(!copy.ToHandle(&for_assert) || !for_assert.is_identical_to(object)); return copy; } +Handle<Object> JSObject::GetDataProperty(Handle<JSObject> object, + Handle<Name> key) { + Isolate* isolate = object->GetIsolate(); + LookupResult lookup(isolate); + { + DisallowHeapAllocation no_allocation; + object->LookupRealNamedProperty(key, &lookup); + } + Handle<Object> result = isolate->factory()->undefined_value(); + if (lookup.IsFound() && !lookup.IsTransition()) { + switch (lookup.type()) { + case NORMAL: + result = GetNormalizedProperty( + Handle<JSObject>(lookup.holder(), isolate), &lookup); + break; + case FIELD: + result = FastPropertyAt(Handle<JSObject>(lookup.holder(), isolate), + lookup.representation(), + lookup.GetFieldIndex().field_index()); + break; + case CONSTANT: + result = Handle<Object>(lookup.GetConstant(), isolate); + break; + case CALLBACKS: + case HANDLER: + case INTERCEPTOR: + break; + case NONEXISTENT: + UNREACHABLE(); + } + } + return result; +} + + // Tests for the fast common case for property enumeration: // - This object and 
all prototypes has an enum cache (which means that // it is no proxy, has no interceptors and needs no access checks). @@ -5941,24 +6300,11 @@ int Map::NextFreePropertyIndex() { } -AccessorDescriptor* Map::FindAccessor(Name* name) { - DescriptorArray* descs = instance_descriptors(); - int number_of_own_descriptors = NumberOfOwnDescriptors(); - for (int i = 0; i < number_of_own_descriptors; i++) { - if (descs->GetType(i) == CALLBACKS && name->Equals(descs->GetKey(i))) { - return descs->GetCallbacks(i); - } - } - return NULL; -} - - void JSReceiver::LocalLookup( - Name* name, LookupResult* result, bool search_hidden_prototypes) { + Handle<Name> name, LookupResult* result, bool search_hidden_prototypes) { + DisallowHeapAllocation no_gc; ASSERT(name->IsName()); - Heap* heap = GetHeap(); - if (IsJSGlobalProxy()) { Object* proto = GetPrototype(); if (proto->IsNull()) return result->NotFound(); @@ -5982,7 +6328,7 @@ void JSReceiver::LocalLookup( // Check for lookup interceptor except when bootstrapping. if (js_object->HasNamedInterceptor() && - !heap->isolate()->bootstrapper()->IsActive()) { + !GetIsolate()->bootstrapper()->IsActive()) { result->InterceptorResult(js_object); return; } @@ -5999,11 +6345,12 @@ void JSReceiver::LocalLookup( } -void JSReceiver::Lookup(Name* name, LookupResult* result) { +void JSReceiver::Lookup(Handle<Name> name, LookupResult* result) { + DisallowHeapAllocation no_gc; // Ecma-262 3rd 8.6.2.4 - Heap* heap = GetHeap(); + Handle<Object> null_value = GetIsolate()->factory()->null_value(); for (Object* current = this; - current != heap->null_value(); + current != *null_value; current = JSObject::cast(current)->GetPrototype()) { JSReceiver::cast(current)->LocalLookup(name, result, false); if (result->IsFound()) return; @@ -6013,10 +6360,11 @@ void JSReceiver::Lookup(Name* name, LookupResult* result) { // Search object and its prototype chain for callback properties. 
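JSReceiver::Lookup above is the ECMA-262 8.6.2.4 walk: consult the receiver's own properties, then each prototype in turn, stopping at the first hit or at null. The same loop over a toy object model (ToyObject and Lookup here are illustrative, not the V8 types):

    #include <map>
    #include <optional>
    #include <string>

    struct ToyObject {
      std::map<std::string, int> own;        // own properties
      const ToyObject* prototype = nullptr;  // null terminates the chain
    };

    // Walk the prototype chain and return the first matching own property.
    std::optional<int> Lookup(const ToyObject* obj, const std::string& name) {
      for (const ToyObject* cur = obj; cur != nullptr; cur = cur->prototype) {
        auto it = cur->own.find(name);
        if (it != cur->own.end()) return it->second;
      }
      return std::nullopt;   // not found anywhere on the chain
    }
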
-void JSObject::LookupCallbackProperty(Name* name, LookupResult* result) { - Heap* heap = GetHeap(); +void JSObject::LookupCallbackProperty(Handle<Name> name, LookupResult* result) { + DisallowHeapAllocation no_gc; + Handle<Object> null_value = GetIsolate()->factory()->null_value(); for (Object* current = this; - current != heap->null_value() && current->IsJSObject(); + current != *null_value && current->IsJSObject(); current = JSObject::cast(current)->GetPrototype()) { JSObject::cast(current)->LocalLookupRealNamedProperty(name, result); if (result->IsPropertyCallbacks()) return; @@ -6025,6 +6373,247 @@ void JSObject::LookupCallbackProperty(Name* name, LookupResult* result) { } +static bool ContainsOnlyValidKeys(Handle<FixedArray> array) { + int len = array->length(); + for (int i = 0; i < len; i++) { + Object* e = array->get(i); + if (!(e->IsString() || e->IsNumber())) return false; + } + return true; +} + + +static Handle<FixedArray> ReduceFixedArrayTo( + Handle<FixedArray> array, int length) { + ASSERT(array->length() >= length); + if (array->length() == length) return array; + + Handle<FixedArray> new_array = + array->GetIsolate()->factory()->NewFixedArray(length); + for (int i = 0; i < length; ++i) new_array->set(i, array->get(i)); + return new_array; +} + + +static Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object, + bool cache_result) { + Isolate* isolate = object->GetIsolate(); + if (object->HasFastProperties()) { + int own_property_count = object->map()->EnumLength(); + // If the enum length of the given map is set to kInvalidEnumCache, this + // means that the map itself has never used the present enum cache. The + // first step to using the cache is to set the enum length of the map by + // counting the number of own descriptors that are not DONT_ENUM or + // SYMBOLIC. + if (own_property_count == kInvalidEnumCacheSentinel) { + own_property_count = object->map()->NumberOfDescribedProperties( + OWN_DESCRIPTORS, DONT_SHOW); + } else { + ASSERT(own_property_count == object->map()->NumberOfDescribedProperties( + OWN_DESCRIPTORS, DONT_SHOW)); + } + + if (object->map()->instance_descriptors()->HasEnumCache()) { + DescriptorArray* desc = object->map()->instance_descriptors(); + Handle<FixedArray> keys(desc->GetEnumCache(), isolate); + + // In case the number of properties required in the enum are actually + // present, we can reuse the enum cache. Otherwise, this means that the + // enum cache was generated for a previous (smaller) version of the + // Descriptor Array. In that case we regenerate the enum cache. 
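The comment above spells out when the cached enum keys can be reused: the cache may hold more keys than the current map needs, in which case it is trimmed with ReduceFixedArrayTo, and only a too-short cache forces regeneration. The same decision with a plain vector (ReuseEnumCache is our name, not V8's):

    #include <optional>
    #include <string>
    #include <vector>

    // Reuse the cached enum keys if they cover at least `needed` entries,
    // trimming the excess; report a miss otherwise so the caller regenerates.
    std::optional<std::vector<std::string>> ReuseEnumCache(
        const std::vector<std::string>& cached, size_t needed) {
      if (cached.size() < needed) return std::nullopt;   // regenerate the cache
      return std::vector<std::string>(cached.begin(),
                                      cached.begin() + needed);
    }
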
+ if (own_property_count <= keys->length()) { + if (cache_result) object->map()->SetEnumLength(own_property_count); + isolate->counters()->enum_cache_hits()->Increment(); + return ReduceFixedArrayTo(keys, own_property_count); + } + } + + Handle<Map> map(object->map()); + + if (map->instance_descriptors()->IsEmpty()) { + isolate->counters()->enum_cache_hits()->Increment(); + if (cache_result) map->SetEnumLength(0); + return isolate->factory()->empty_fixed_array(); + } + + isolate->counters()->enum_cache_misses()->Increment(); + + Handle<FixedArray> storage = isolate->factory()->NewFixedArray( + own_property_count); + Handle<FixedArray> indices = isolate->factory()->NewFixedArray( + own_property_count); + + Handle<DescriptorArray> descs = + Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate); + + int size = map->NumberOfOwnDescriptors(); + int index = 0; + + for (int i = 0; i < size; i++) { + PropertyDetails details = descs->GetDetails(i); + Object* key = descs->GetKey(i); + if (!(details.IsDontEnum() || key->IsSymbol())) { + storage->set(index, key); + if (!indices.is_null()) { + if (details.type() != FIELD) { + indices = Handle<FixedArray>(); + } else { + int field_index = descs->GetFieldIndex(i); + if (field_index >= map->inobject_properties()) { + field_index = -(field_index - map->inobject_properties() + 1); + } + field_index = field_index << 1; + if (details.representation().IsDouble()) { + field_index |= 1; + } + indices->set(index, Smi::FromInt(field_index)); + } + } + index++; + } + } + ASSERT(index == storage->length()); + + Handle<FixedArray> bridge_storage = + isolate->factory()->NewFixedArray( + DescriptorArray::kEnumCacheBridgeLength); + DescriptorArray* desc = object->map()->instance_descriptors(); + desc->SetEnumCache(*bridge_storage, + *storage, + indices.is_null() ? Object::cast(Smi::FromInt(0)) + : Object::cast(*indices)); + if (cache_result) { + object->map()->SetEnumLength(own_property_count); + } + return storage; + } else { + Handle<NameDictionary> dictionary(object->property_dictionary()); + int length = dictionary->NumberOfEnumElements(); + if (length == 0) { + return Handle<FixedArray>(isolate->heap()->empty_fixed_array()); + } + Handle<FixedArray> storage = isolate->factory()->NewFixedArray(length); + dictionary->CopyEnumKeysTo(*storage); + return storage; + } +} + + +MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object, + KeyCollectionType type) { + USE(ContainsOnlyValidKeys); + Isolate* isolate = object->GetIsolate(); + Handle<FixedArray> content = isolate->factory()->empty_fixed_array(); + Handle<JSObject> arguments_boilerplate = Handle<JSObject>( + isolate->context()->native_context()->sloppy_arguments_boilerplate(), + isolate); + Handle<JSFunction> arguments_function = Handle<JSFunction>( + JSFunction::cast(arguments_boilerplate->map()->constructor()), + isolate); + + // Only collect keys if access is permitted. 
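The indices array built in GetEnumPropertyKeys above packs three facts into one small integer per property: in-object fields keep their index, out-of-object fields map to negative values, and the low bit after the shift records whether the field stores a double. The same arithmetic as a standalone encode/decode pair (the function names are ours; like the V8 code, the decode relies on arithmetic right shift of negative values):

    // Encode a field location the way the enum-cache indices above do.
    int EncodeFieldIndex(int field_index, int inobject_properties, bool is_double) {
      if (field_index >= inobject_properties) {
        // Out-of-object properties become -1, -2, ... before the shift.
        field_index = -(field_index - inobject_properties + 1);
      }
      field_index <<= 1;               // make room for the "is double" bit
      if (is_double) field_index |= 1;
      return field_index;
    }

    // Recover the three pieces of information again.
    void DecodeFieldIndex(int encoded, int inobject_properties,
                          int* field_index, bool* inobject, bool* is_double) {
      *is_double = (encoded & 1) != 0;
      encoded >>= 1;                   // arithmetic shift keeps the sign
      *inobject = encoded >= 0;
      *field_index = *inobject ? encoded : -encoded - 1 + inobject_properties;
    }
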
+ for (Handle<Object> p = object; + *p != isolate->heap()->null_value(); + p = Handle<Object>(p->GetPrototype(isolate), isolate)) { + if (p->IsJSProxy()) { + Handle<JSProxy> proxy(JSProxy::cast(*p), isolate); + Handle<Object> args[] = { proxy }; + Handle<Object> names; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, names, + Execution::Call(isolate, + isolate->proxy_enumerate(), + object, + ARRAY_SIZE(args), + args), + FixedArray); + ASSIGN_RETURN_ON_EXCEPTION( + isolate, content, + FixedArray::AddKeysFromArrayLike( + content, Handle<JSObject>::cast(names)), + FixedArray); + break; + } + + Handle<JSObject> current(JSObject::cast(*p), isolate); + + // Check access rights if required. + if (current->IsAccessCheckNeeded() && + !isolate->MayNamedAccess( + current, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) { + isolate->ReportFailedAccessCheck(current, v8::ACCESS_KEYS); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, FixedArray); + break; + } + + // Compute the element keys. + Handle<FixedArray> element_keys = + isolate->factory()->NewFixedArray(current->NumberOfEnumElements()); + current->GetEnumElementKeys(*element_keys); + ASSIGN_RETURN_ON_EXCEPTION( + isolate, content, + FixedArray::UnionOfKeys(content, element_keys), + FixedArray); + ASSERT(ContainsOnlyValidKeys(content)); + + // Add the element keys from the interceptor. + if (current->HasIndexedInterceptor()) { + Handle<JSObject> result; + if (JSObject::GetKeysForIndexedInterceptor( + current, object).ToHandle(&result)) { + ASSIGN_RETURN_ON_EXCEPTION( + isolate, content, + FixedArray::AddKeysFromArrayLike(content, result), + FixedArray); + } + ASSERT(ContainsOnlyValidKeys(content)); + } + + // We can cache the computed property keys if access checks are + // not needed and no interceptors are involved. + // + // We do not use the cache if the object has elements and + // therefore it does not make sense to cache the property names + // for arguments objects. Arguments objects will always have + // elements. + // Wrapped strings have elements, but don't have an elements + // array or dictionary. So the fast inline test for whether to + // use the cache says yes, so we should not create a cache. + bool cache_enum_keys = + ((current->map()->constructor() != *arguments_function) && + !current->IsJSValue() && + !current->IsAccessCheckNeeded() && + !current->HasNamedInterceptor() && + !current->HasIndexedInterceptor()); + // Compute the property keys and cache them if possible. + ASSIGN_RETURN_ON_EXCEPTION( + isolate, content, + FixedArray::UnionOfKeys( + content, GetEnumPropertyKeys(current, cache_enum_keys)), + FixedArray); + ASSERT(ContainsOnlyValidKeys(content)); + + // Add the property keys from the interceptor. + if (current->HasNamedInterceptor()) { + Handle<JSObject> result; + if (JSObject::GetKeysForNamedInterceptor( + current, object).ToHandle(&result)) { + ASSIGN_RETURN_ON_EXCEPTION( + isolate, content, + FixedArray::AddKeysFromArrayLike(content, result), + FixedArray); + } + ASSERT(ContainsOnlyValidKeys(content)); + } + + // If we only want local properties we bail out after the first + // iteration. + if (type == LOCAL_ONLY) break; + } + return content; +} + + // Try to update an accessor in an elements dictionary. Return true if the // update succeeded, and false otherwise. 
static bool UpdateGetterSetterInDictionary( @@ -6124,7 +6713,7 @@ Handle<AccessorPair> JSObject::CreateAccessorPairFor(Handle<JSObject> object, Handle<Name> name) { Isolate* isolate = object->GetIsolate(); LookupResult result(isolate); - object->LocalLookupRealNamedProperty(*name, &result); + object->LocalLookupRealNamedProperty(name, &result); if (result.IsPropertyCallbacks()) { // Note that the result can actually have IsDontDelete() == true when we // e.g. have to fall back to the slow case while adding a setter after @@ -6171,7 +6760,7 @@ void JSObject::DefinePropertyAccessor(Handle<JSObject> object, bool JSObject::CanSetCallback(Handle<JSObject> object, Handle<Name> name) { Isolate* isolate = object->GetIsolate(); ASSERT(!object->IsAccessCheckNeeded() || - isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)); + isolate->MayNamedAccess(object, name, v8::ACCESS_SET)); // Check if there is an API defined callback object which prohibits // callback overwriting in this object or its prototype chain. @@ -6180,7 +6769,7 @@ bool JSObject::CanSetCallback(Handle<JSObject> object, Handle<Name> name) { // to be overwritten because allowing overwriting could potentially // cause security problems. LookupResult callback_result(isolate); - object->LookupCallbackProperty(*name, &callback_result); + object->LookupCallbackProperty(name, &callback_result); if (callback_result.IsFound()) { Object* callback_obj = callback_result.GetCallbackObject(); if (callback_obj->IsAccessorInfo()) { @@ -6294,8 +6883,9 @@ void JSObject::DefineAccessor(Handle<JSObject> object, Isolate* isolate = object->GetIsolate(); // Check access rights if needed. if (object->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_SET); + !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET); + // TODO(yangguo): Issue 3269, check for scheduled exception missing? return; } @@ -6317,7 +6907,7 @@ void JSObject::DefineAccessor(Handle<JSObject> object, AssertNoContextChange ncc(isolate); // Try to flatten before operating on the string. 
- if (name->IsString()) String::cast(*name)->TryFlatten(); + if (name->IsString()) name = String::Flatten(Handle<String>::cast(name)); if (!JSObject::CanSetCallback(object, name)) return; @@ -6331,16 +6921,17 @@ void JSObject::DefineAccessor(Handle<JSObject> object, if (is_observed) { if (is_element) { preexists = HasLocalElement(object, index); - if (preexists && object->GetLocalElementAccessorPair(index) == NULL) { - old_value = Object::GetElementNoExceptionThrown(isolate, object, index); + if (preexists && GetLocalElementAccessorPair(object, index).is_null()) { + old_value = + Object::GetElement(isolate, object, index).ToHandleChecked(); } } else { LookupResult lookup(isolate); - object->LocalLookup(*name, &lookup, true); + object->LocalLookup(name, &lookup, true); preexists = lookup.IsProperty(); if (preexists && lookup.IsDataProperty()) { - old_value = Object::GetProperty(object, name); - CHECK_NOT_EMPTY_HANDLE(isolate, old_value); + old_value = + Object::GetPropertyOrElement(object, name).ToHandleChecked(); } } } @@ -6389,25 +6980,6 @@ static bool TryAccessorTransition(Handle<JSObject> self, } -static MaybeObject* CopyInsertDescriptor(Map* map, - Name* name, - AccessorPair* accessors, - PropertyAttributes attributes) { - CallbacksDescriptor new_accessors_desc(name, accessors, attributes); - return map->CopyInsertDescriptor(&new_accessors_desc, INSERT_TRANSITION); -} - - -static Handle<Map> CopyInsertDescriptor(Handle<Map> map, - Handle<Name> name, - Handle<AccessorPair> accessors, - PropertyAttributes attributes) { - CALL_HEAP_FUNCTION(map->GetIsolate(), - CopyInsertDescriptor(*map, *name, *accessors, attributes), - Map); -} - - bool JSObject::DefineFastAccessor(Handle<JSObject> object, Handle<Name> name, AccessorComponent component, @@ -6416,7 +6988,7 @@ bool JSObject::DefineFastAccessor(Handle<JSObject> object, ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined()); Isolate* isolate = object->GetIsolate(); LookupResult result(isolate); - object->LocalLookup(*name, &result); + object->LocalLookup(name, &result); if (result.IsFound() && !result.IsPropertyCallbacks()) { return false; @@ -6458,8 +7030,8 @@ bool JSObject::DefineFastAccessor(Handle<JSObject> object, if (result.IsFound()) { Handle<Map> target(result.GetTransitionTarget()); int descriptor_number = target->LastAdded(); - ASSERT(target->instance_descriptors()->GetKey(descriptor_number) - ->Equals(*name)); + ASSERT(Name::Equals(name, + handle(target->instance_descriptors()->GetKey(descriptor_number)))); return TryAccessorTransition(object, target, descriptor_number, component, accessor, attributes); } @@ -6472,24 +7044,27 @@ bool JSObject::DefineFastAccessor(Handle<JSObject> object, ? AccessorPair::Copy(Handle<AccessorPair>(source_accessors)) : isolate->factory()->NewAccessorPair(); accessors->set(component, *accessor); - Handle<Map> new_map = CopyInsertDescriptor(Handle<Map>(object->map()), - name, accessors, attributes); + + CallbacksDescriptor new_accessors_desc(name, accessors, attributes); + Handle<Map> new_map = Map::CopyInsertDescriptor( + handle(object->map()), &new_accessors_desc, INSERT_TRANSITION); + JSObject::MigrateToMap(object, new_map); return true; } -Handle<Object> JSObject::SetAccessor(Handle<JSObject> object, - Handle<AccessorInfo> info) { +MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object, + Handle<AccessorInfo> info) { Isolate* isolate = object->GetIsolate(); Factory* factory = isolate->factory(); Handle<Name> name(Name::cast(info->name())); // Check access rights if needed. 
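The access check that this comment introduces repeats a prologue used throughout the hunk (PreventExtensions, Freeze, DefineAccessor, GetAccessor): ask MayNamedAccess, report the failure, give a scheduled exception precedence, and otherwise fall back to a benign value. A sketch of that shape as a helper; CheckNamedAccess is hypothetical, and the empty MaybeHandle stands in for what RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION does in the real code:

    // Hypothetical helper for the access-check prologue seen above.  Returns
    // true when the caller may proceed; otherwise fills *failure_value with
    // what the caller should return.
    bool CheckNamedAccess(Isolate* isolate, Handle<JSObject> object,
                          Handle<Name> name, v8::AccessType type,
                          MaybeHandle<Object>* failure_value) {
      if (!object->IsAccessCheckNeeded() ||
          isolate->MayNamedAccess(object, name, type)) {
        return true;
      }
      isolate->ReportFailedAccessCheck(object, type);
      if (isolate->has_scheduled_exception()) {
        *failure_value = MaybeHandle<Object>();   // let the exception propagate
      } else {
        *failure_value = isolate->factory()->undefined_value();
      }
      return false;
    }
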
if (object->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_SET); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return factory->undefined_value(); } @@ -6505,7 +7080,7 @@ Handle<Object> JSObject::SetAccessor(Handle<JSObject> object, AssertNoContextChange ncc(isolate); // Try to flatten before operating on the string. - if (name->IsString()) FlattenString(Handle<String>::cast(name)); + if (name->IsString()) name = String::Flatten(Handle<String>::cast(name)); if (!JSObject::CanSetCallback(object, name)) { return factory->undefined_value(); @@ -6548,7 +7123,7 @@ Handle<Object> JSObject::SetAccessor(Handle<JSObject> object, } else { // Lookup the name. LookupResult result(isolate); - object->LocalLookup(*name, &result, true); + object->LocalLookup(name, &result, true); // ES5 forbids turning a property into an accessor if it's not // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5). if (result.IsFound() && (result.IsReadOnly() || result.IsDontDelete())) { @@ -6562,9 +7137,9 @@ Handle<Object> JSObject::SetAccessor(Handle<JSObject> object, } -Handle<Object> JSObject::GetAccessor(Handle<JSObject> object, - Handle<Name> name, - AccessorComponent component) { +MaybeHandle<Object> JSObject::GetAccessor(Handle<JSObject> object, + Handle<Name> name, + AccessorComponent component) { Isolate* isolate = object->GetIsolate(); // Make sure that the top context does not change when doing callbacks or @@ -6573,9 +7148,9 @@ Handle<Object> JSObject::GetAccessor(Handle<JSObject> object, // Check access rights if needed. 
if (object->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_HAS)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + !isolate->MayNamedAccess(object, name, v8::ACCESS_HAS)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return isolate->factory()->undefined_value(); } @@ -6604,7 +7179,7 @@ Handle<Object> JSObject::GetAccessor(Handle<JSObject> object, !obj->IsNull(); obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) { LookupResult result(isolate); - JSReceiver::cast(*obj)->LocalLookup(*name, &result); + JSReceiver::cast(*obj)->LocalLookup(name, &result); if (result.IsFound()) { if (result.IsReadOnly()) return isolate->factory()->undefined_value(); if (result.IsPropertyCallbacks()) { @@ -6649,31 +7224,20 @@ Object* JSObject::SlowReverseLookup(Object* value) { } -Handle<Map> Map::RawCopy(Handle<Map> map, - int instance_size) { - CALL_HEAP_FUNCTION(map->GetIsolate(), - map->RawCopy(instance_size), - Map); -} - - -MaybeObject* Map::RawCopy(int instance_size) { - Map* result; - MaybeObject* maybe_result = - GetHeap()->AllocateMap(instance_type(), instance_size); - if (!maybe_result->To(&result)) return maybe_result; - - result->set_prototype(prototype()); - result->set_constructor(constructor()); - result->set_bit_field(bit_field()); - result->set_bit_field2(bit_field2()); - int new_bit_field3 = bit_field3(); +Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) { + Handle<Map> result = map->GetIsolate()->factory()->NewMap( + map->instance_type(), instance_size); + result->set_prototype(map->prototype()); + result->set_constructor(map->constructor()); + result->set_bit_field(map->bit_field()); + result->set_bit_field2(map->bit_field2()); + int new_bit_field3 = map->bit_field3(); new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true); new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0); new_bit_field3 = EnumLengthBits::update(new_bit_field3, kInvalidEnumCacheSentinel); new_bit_field3 = Deprecated::update(new_bit_field3, false); - if (!is_dictionary_map()) { + if (!map->is_dictionary_map()) { new_bit_field3 = IsUnstable::update(new_bit_field3, false); } result->set_bit_field3(new_bit_field3); @@ -6681,6 +7245,50 @@ MaybeObject* Map::RawCopy(int instance_size) { } +Handle<Map> Map::Normalize(Handle<Map> fast_map, + PropertyNormalizationMode mode) { + ASSERT(!fast_map->is_dictionary_map()); + + Isolate* isolate = fast_map->GetIsolate(); + Handle<NormalizedMapCache> cache( + isolate->context()->native_context()->normalized_map_cache()); + + Handle<Map> new_map; + if (cache->Get(fast_map, mode).ToHandle(&new_map)) { +#ifdef VERIFY_HEAP + if (FLAG_verify_heap) { + new_map->SharedMapVerify(); + } +#endif +#ifdef ENABLE_SLOW_ASSERTS + if (FLAG_enable_slow_asserts) { + // The cached map should match newly created normalized map bit-by-bit, + // except for the code cache, which can contain some ics which can be + // applied to the shared map. 
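Map::RawCopy above resets several flags that are packed into the single bit_field3 word through update helpers (OwnsDescriptors::update, EnumLengthBits::update, and so on). A minimal version of that packing pattern; BitFieldSketch and the example field layouts are ours, not the real Map bit assignments:

    #include <cstdint>

    // Minimal packed-flags helper mirroring the Something::update(word, value)
    // calls above.  Each field owns kSize bits starting at kShift.
    template <typename T, int kShift, int kSize>
    struct BitFieldSketch {
      static constexpr uint32_t kMask = ((1u << kSize) - 1) << kShift;
      static uint32_t encode(T value) {
        return static_cast<uint32_t>(value) << kShift;
      }
      static T decode(uint32_t word) {
        return static_cast<T>((word & kMask) >> kShift);
      }
      static uint32_t update(uint32_t word, T value) {
        return (word & ~kMask) | encode(value);
      }
    };

    // Example fields packed into one 32-bit word, in the spirit of bit_field3.
    using OwnsDescriptorsBit   = BitFieldSketch<bool, 0, 1>;
    using EnumLengthBitsSketch = BitFieldSketch<uint32_t, 1, 11>;
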
+ Handle<Map> fresh = Map::CopyNormalized( + fast_map, mode, SHARED_NORMALIZED_MAP); + + ASSERT(memcmp(fresh->address(), + new_map->address(), + Map::kCodeCacheOffset) == 0); + STATIC_ASSERT(Map::kDependentCodeOffset == + Map::kCodeCacheOffset + kPointerSize); + int offset = Map::kDependentCodeOffset + kPointerSize; + ASSERT(memcmp(fresh->address() + offset, + new_map->address() + offset, + Map::kSize - offset) == 0); + } +#endif + } else { + new_map = Map::CopyNormalized(fast_map, mode, SHARED_NORMALIZED_MAP); + cache->Set(fast_map, new_map); + isolate->counters()->normalized_maps()->Increment(); + } + fast_map->NotifyLeafMapLayoutChange(); + return new_map; +} + + Handle<Map> Map::CopyNormalized(Handle<Map> map, PropertyNormalizationMode mode, NormalizedMapSharingMode sharing) { @@ -6689,7 +7297,7 @@ Handle<Map> Map::CopyNormalized(Handle<Map> map, new_instance_size -= map->inobject_properties() * kPointerSize; } - Handle<Map> result = Map::RawCopy(map, new_instance_size); + Handle<Map> result = RawCopy(map, new_instance_size); if (mode != CLEAR_INOBJECT_PROPERTIES) { result->set_inobject_properties(map->inobject_properties()); @@ -6710,99 +7318,57 @@ Handle<Map> Map::CopyNormalized(Handle<Map> map, Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) { - CALL_HEAP_FUNCTION(map->GetIsolate(), map->CopyDropDescriptors(), Map); -} - - -MaybeObject* Map::CopyDropDescriptors() { - Map* result; - MaybeObject* maybe_result = RawCopy(instance_size()); - if (!maybe_result->To(&result)) return maybe_result; + Handle<Map> result = RawCopy(map, map->instance_size()); // Please note instance_type and instance_size are set when allocated. - result->set_inobject_properties(inobject_properties()); - result->set_unused_property_fields(unused_property_fields()); + result->set_inobject_properties(map->inobject_properties()); + result->set_unused_property_fields(map->unused_property_fields()); - result->set_pre_allocated_property_fields(pre_allocated_property_fields()); + result->set_pre_allocated_property_fields( + map->pre_allocated_property_fields()); result->set_is_shared(false); - result->ClearCodeCache(GetHeap()); - NotifyLeafMapLayoutChange(); + result->ClearCodeCache(map->GetHeap()); + map->NotifyLeafMapLayoutChange(); return result; } -MaybeObject* Map::ShareDescriptor(DescriptorArray* descriptors, - Descriptor* descriptor) { +Handle<Map> Map::ShareDescriptor(Handle<Map> map, + Handle<DescriptorArray> descriptors, + Descriptor* descriptor) { // Sanity check. This path is only to be taken if the map owns its descriptor // array, implying that its NumberOfOwnDescriptors equals the number of // descriptors in the descriptor array. - ASSERT(NumberOfOwnDescriptors() == - instance_descriptors()->number_of_descriptors()); - Map* result; - MaybeObject* maybe_result = CopyDropDescriptors(); - if (!maybe_result->To(&result)) return maybe_result; + ASSERT(map->NumberOfOwnDescriptors() == + map->instance_descriptors()->number_of_descriptors()); - Name* name = descriptor->GetKey(); - - TransitionArray* transitions; - MaybeObject* maybe_transitions = - AddTransition(name, result, SIMPLE_TRANSITION); - if (!maybe_transitions->To(&transitions)) return maybe_transitions; - - int old_size = descriptors->number_of_descriptors(); - - DescriptorArray* new_descriptors; - - if (descriptors->NumberOfSlackDescriptors() > 0) { - new_descriptors = descriptors; - new_descriptors->Append(descriptor); - } else { - // Descriptor arrays grow by 50%. 
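Both the removed code above and its replacement size the extra descriptor-array slack with the same rule: one spare slot for small arrays, 50% for larger ones, which keeps repeated appends amortized linear. The rule as a tiny helper (the function is ours):

    // Slack to add when a descriptor array is full, mirroring the
    // "old_size < 4 ? 1 : old_size / 2" policy in the hunk above.
    int DescriptorSlack(int old_size) {
      return old_size < 4 ? 1 : old_size / 2;
    }
    // Successive full capacities then grow roughly as 4, 6, 9, 13, 19, ...
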
- MaybeObject* maybe_descriptors = DescriptorArray::Allocate( - GetIsolate(), old_size, old_size < 4 ? 1 : old_size / 2); - if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors; - - DescriptorArray::WhitenessWitness witness(new_descriptors); + Handle<Map> result = CopyDropDescriptors(map); + Handle<Name> name = descriptor->GetKey(); + Handle<TransitionArray> transitions = + TransitionArray::CopyInsert(map, name, result, SIMPLE_TRANSITION); - // Copy the descriptors, inserting a descriptor. - for (int i = 0; i < old_size; ++i) { - new_descriptors->CopyFrom(i, descriptors, i, witness); + // Ensure there's space for the new descriptor in the shared descriptor array. + if (descriptors->NumberOfSlackDescriptors() == 0) { + int old_size = descriptors->number_of_descriptors(); + if (old_size == 0) { + descriptors = DescriptorArray::Allocate(map->GetIsolate(), 0, 1); + } else { + EnsureDescriptorSlack(map, old_size < 4 ? 1 : old_size / 2); + descriptors = handle(map->instance_descriptors()); } + } - new_descriptors->Append(descriptor, witness); - - if (old_size > 0) { - // If the source descriptors had an enum cache we copy it. This ensures - // that the maps to which we push the new descriptor array back can rely - // on a cache always being available once it is set. If the map has more - // enumerated descriptors than available in the original cache, the cache - // will be lazily replaced by the extended cache when needed. - if (descriptors->HasEnumCache()) { - new_descriptors->CopyEnumCacheFrom(descriptors); - } - - Map* map; - // Replace descriptors by new_descriptors in all maps that share it. - - GetHeap()->incremental_marking()->RecordWrites(descriptors); - for (Object* current = GetBackPointer(); - !current->IsUndefined(); - current = map->GetBackPointer()) { - map = Map::cast(current); - if (map->instance_descriptors() != descriptors) break; - map->set_instance_descriptors(new_descriptors); - } + // Commit the state atomically. 
+ DisallowHeapAllocation no_gc; - set_instance_descriptors(new_descriptors); - } - } + descriptors->Append(descriptor); + result->SetBackPointer(*map); + result->InitializeDescriptors(*descriptors); - result->SetBackPointer(this); - result->InitializeDescriptors(new_descriptors); - ASSERT(result->NumberOfOwnDescriptors() == NumberOfOwnDescriptors() + 1); + ASSERT(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1); - set_transitions(transitions); - set_owns_descriptors(false); + map->set_transitions(*transitions); + map->set_owns_descriptors(false); return result; } @@ -6811,33 +7377,28 @@ MaybeObject* Map::ShareDescriptor(DescriptorArray* descriptors, Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map, Handle<DescriptorArray> descriptors, TransitionFlag flag, - Handle<Name> name) { - CALL_HEAP_FUNCTION(map->GetIsolate(), - map->CopyReplaceDescriptors(*descriptors, flag, *name), - Map); -} - - -MaybeObject* Map::CopyReplaceDescriptors(DescriptorArray* descriptors, - TransitionFlag flag, - Name* name, - SimpleTransitionFlag simple_flag) { + MaybeHandle<Name> maybe_name, + SimpleTransitionFlag simple_flag) { ASSERT(descriptors->IsSortedNoDuplicates()); - Map* result; - MaybeObject* maybe_result = CopyDropDescriptors(); - if (!maybe_result->To(&result)) return maybe_result; - - result->InitializeDescriptors(descriptors); + Handle<Map> result = CopyDropDescriptors(map); + result->InitializeDescriptors(*descriptors); - if (flag == INSERT_TRANSITION && CanHaveMoreTransitions()) { - TransitionArray* transitions; - MaybeObject* maybe_transitions = AddTransition(name, result, simple_flag); - if (!maybe_transitions->To(&transitions)) return maybe_transitions; - set_transitions(transitions); - result->SetBackPointer(this); + if (flag == INSERT_TRANSITION && map->CanHaveMoreTransitions()) { + Handle<Name> name; + CHECK(maybe_name.ToHandle(&name)); + Handle<TransitionArray> transitions = TransitionArray::CopyInsert( + map, name, result, simple_flag); + map->set_transitions(*transitions); + result->SetBackPointer(*map); } else { - descriptors->InitializeRepresentations(Representation::Tagged()); + int length = descriptors->number_of_descriptors(); + for (int i = 0; i < length; i++) { + descriptors->SetRepresentation(i, Representation::Tagged()); + if (descriptors->GetDetails(i).type() == FIELD) { + descriptors->SetValue(i, HeapType::Any()); + } + } } return result; @@ -6851,7 +7412,7 @@ Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map, Handle<DescriptorArray> descriptors) { ASSERT(descriptors->IsSortedNoDuplicates()); - Handle<Map> result = Map::CopyDropDescriptors(map); + Handle<Map> result = CopyDropDescriptors(map); result->InitializeDescriptors(*descriptors); result->SetNumberOfOwnDescriptors(new_descriptor + 1); @@ -6868,8 +7429,8 @@ Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map, result->set_owns_descriptors(false); Handle<Name> name = handle(descriptors->GetKey(new_descriptor)); - Handle<TransitionArray> transitions = Map::AddTransition(map, name, result, - SIMPLE_TRANSITION); + Handle<TransitionArray> transitions = TransitionArray::CopyInsert( + map, name, result, SIMPLE_TRANSITION); map->set_transitions(*transitions); result->SetBackPointer(*map); @@ -6878,52 +7439,48 @@ Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map, } -MaybeObject* Map::CopyAsElementsKind(ElementsKind kind, TransitionFlag flag) { +Handle<Map> Map::CopyAsElementsKind(Handle<Map> map, ElementsKind kind, + TransitionFlag flag) { if (flag == INSERT_TRANSITION) { - 
ASSERT(!HasElementsTransition() || - ((elements_transition_map()->elements_kind() == DICTIONARY_ELEMENTS || + ASSERT(!map->HasElementsTransition() || + ((map->elements_transition_map()->elements_kind() == + DICTIONARY_ELEMENTS || IsExternalArrayElementsKind( - elements_transition_map()->elements_kind())) && + map->elements_transition_map()->elements_kind())) && (kind == DICTIONARY_ELEMENTS || IsExternalArrayElementsKind(kind)))); ASSERT(!IsFastElementsKind(kind) || - IsMoreGeneralElementsKindTransition(elements_kind(), kind)); - ASSERT(kind != elements_kind()); + IsMoreGeneralElementsKindTransition(map->elements_kind(), kind)); + ASSERT(kind != map->elements_kind()); } bool insert_transition = - flag == INSERT_TRANSITION && !HasElementsTransition(); + flag == INSERT_TRANSITION && !map->HasElementsTransition(); - if (insert_transition && owns_descriptors()) { + if (insert_transition && map->owns_descriptors()) { // In case the map owned its own descriptors, share the descriptors and // transfer ownership to the new map. - Map* new_map; - MaybeObject* maybe_new_map = CopyDropDescriptors(); - if (!maybe_new_map->To(&new_map)) return maybe_new_map; + Handle<Map> new_map = CopyDropDescriptors(map); - MaybeObject* added_elements = set_elements_transition_map(new_map); - if (added_elements->IsFailure()) return added_elements; + SetElementsTransitionMap(map, new_map); new_map->set_elements_kind(kind); - new_map->InitializeDescriptors(instance_descriptors()); - new_map->SetBackPointer(this); - set_owns_descriptors(false); + new_map->InitializeDescriptors(map->instance_descriptors()); + new_map->SetBackPointer(*map); + map->set_owns_descriptors(false); return new_map; } // In case the map did not own its own descriptors, a split is forced by // copying the map; creating a new descriptor array cell. // Create a new free-floating map only if we are not allowed to store it. - Map* new_map; - MaybeObject* maybe_new_map = Copy(); - if (!maybe_new_map->To(&new_map)) return maybe_new_map; + Handle<Map> new_map = Copy(map); new_map->set_elements_kind(kind); if (insert_transition) { - MaybeObject* added_elements = set_elements_transition_map(new_map); - if (added_elements->IsFailure()) return added_elements; - new_map->SetBackPointer(this); + SetElementsTransitionMap(map, new_map); + new_map->SetBackPointer(*map); } return new_map; @@ -6939,14 +7496,13 @@ Handle<Map> Map::CopyForObserved(Handle<Map> map) { // transfer ownership to the new map. 
Handle<Map> new_map; if (map->owns_descriptors()) { - new_map = Map::CopyDropDescriptors(map); + new_map = CopyDropDescriptors(map); } else { - new_map = Map::Copy(map); + new_map = Copy(map); } - Handle<TransitionArray> transitions = - Map::AddTransition(map, isolate->factory()->observed_symbol(), new_map, - FULL_TRANSITION); + Handle<TransitionArray> transitions = TransitionArray::CopyInsert( + map, isolate->factory()->observed_symbol(), new_map, FULL_TRANSITION); map->set_transitions(*transitions); @@ -6962,127 +7518,127 @@ Handle<Map> Map::CopyForObserved(Handle<Map> map) { } -MaybeObject* Map::CopyWithPreallocatedFieldDescriptors() { - if (pre_allocated_property_fields() == 0) return CopyDropDescriptors(); +Handle<Map> Map::Copy(Handle<Map> map) { + Handle<DescriptorArray> descriptors(map->instance_descriptors()); + int number_of_own_descriptors = map->NumberOfOwnDescriptors(); + Handle<DescriptorArray> new_descriptors = + DescriptorArray::CopyUpTo(descriptors, number_of_own_descriptors); + return CopyReplaceDescriptors( + map, new_descriptors, OMIT_TRANSITION, MaybeHandle<Name>()); +} - // If the map has pre-allocated properties always start out with a descriptor - // array describing these properties. - ASSERT(constructor()->IsJSFunction()); - JSFunction* ctor = JSFunction::cast(constructor()); - Map* map = ctor->initial_map(); - DescriptorArray* descriptors = map->instance_descriptors(); - int number_of_own_descriptors = map->NumberOfOwnDescriptors(); - DescriptorArray* new_descriptors; - MaybeObject* maybe_descriptors = - descriptors->CopyUpTo(number_of_own_descriptors); - if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors; +Handle<Map> Map::Create(Handle<JSFunction> constructor, + int extra_inobject_properties) { + Handle<Map> copy = Copy(handle(constructor->initial_map())); - return CopyReplaceDescriptors(new_descriptors, OMIT_TRANSITION); -} + // Check that we do not overflow the instance size when adding the + // extra inobject properties. + int instance_size_delta = extra_inobject_properties * kPointerSize; + int max_instance_size_delta = + JSObject::kMaxInstanceSize - copy->instance_size(); + int max_extra_properties = max_instance_size_delta >> kPointerSizeLog2; + // If the instance size overflows, we allocate as many properties as we can as + // inobject properties. + if (extra_inobject_properties > max_extra_properties) { + instance_size_delta = max_instance_size_delta; + extra_inobject_properties = max_extra_properties; + } -Handle<Map> Map::Copy(Handle<Map> map) { - CALL_HEAP_FUNCTION(map->GetIsolate(), map->Copy(), Map); + // Adjust the map with the extra inobject properties. 
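Just above, Map::Create clamps the requested extra in-object properties so that the enlarged instance never exceeds JSObject::kMaxInstanceSize; anything beyond the limit simply is not reserved in-object. The same calculation in isolation (the name and the explicit pointer_size parameter are illustrative):

    #include <algorithm>

    // Largest number of extra in-object properties that still fits below
    // max_size, mirroring the max_extra_properties computation above.
    int ClampExtraInobjectProperties(int requested, int current_size,
                                     int max_size, int pointer_size) {
      int max_extra = (max_size - current_size) / pointer_size;
      return std::min(requested, max_extra);
    }
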
+ int inobject_properties = + copy->inobject_properties() + extra_inobject_properties; + copy->set_inobject_properties(inobject_properties); + copy->set_unused_property_fields(inobject_properties); + copy->set_instance_size(copy->instance_size() + instance_size_delta); + copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy)); + return copy; } -MaybeObject* Map::Copy() { - DescriptorArray* descriptors = instance_descriptors(); - DescriptorArray* new_descriptors; - int number_of_own_descriptors = NumberOfOwnDescriptors(); - MaybeObject* maybe_descriptors = - descriptors->CopyUpTo(number_of_own_descriptors); - if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors; - - return CopyReplaceDescriptors(new_descriptors, OMIT_TRANSITION); +Handle<Map> Map::CopyForFreeze(Handle<Map> map) { + int num_descriptors = map->NumberOfOwnDescriptors(); + Isolate* isolate = map->GetIsolate(); + Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes( + handle(map->instance_descriptors(), isolate), num_descriptors, FROZEN); + Handle<Map> new_map = Map::CopyReplaceDescriptors( + map, new_desc, INSERT_TRANSITION, isolate->factory()->frozen_symbol()); + new_map->freeze(); + new_map->set_is_extensible(false); + new_map->set_elements_kind(DICTIONARY_ELEMENTS); + return new_map; } -MaybeObject* Map::CopyAddDescriptor(Descriptor* descriptor, - TransitionFlag flag) { - DescriptorArray* descriptors = instance_descriptors(); +Handle<Map> Map::CopyAddDescriptor(Handle<Map> map, + Descriptor* descriptor, + TransitionFlag flag) { + Handle<DescriptorArray> descriptors(map->instance_descriptors()); // Ensure the key is unique. - MaybeObject* maybe_failure = descriptor->KeyToUniqueName(); - if (maybe_failure->IsFailure()) return maybe_failure; - - int old_size = NumberOfOwnDescriptors(); - int new_size = old_size + 1; + descriptor->KeyToUniqueName(); if (flag == INSERT_TRANSITION && - owns_descriptors() && - CanHaveMoreTransitions()) { - return ShareDescriptor(descriptors, descriptor); - } - - DescriptorArray* new_descriptors; - MaybeObject* maybe_descriptors = - DescriptorArray::Allocate(GetIsolate(), old_size, 1); - if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors; - - DescriptorArray::WhitenessWitness witness(new_descriptors); - - // Copy the descriptors, inserting a descriptor. 
- for (int i = 0; i < old_size; ++i) { - new_descriptors->CopyFrom(i, descriptors, i, witness); + map->owns_descriptors() && + map->CanHaveMoreTransitions()) { + return ShareDescriptor(map, descriptors, descriptor); } - if (old_size != descriptors->number_of_descriptors()) { - new_descriptors->SetNumberOfDescriptors(new_size); - new_descriptors->Set(old_size, descriptor, witness); - new_descriptors->Sort(); - } else { - new_descriptors->Append(descriptor, witness); - } + Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo( + descriptors, map->NumberOfOwnDescriptors(), 1); + new_descriptors->Append(descriptor); - Name* key = descriptor->GetKey(); - return CopyReplaceDescriptors(new_descriptors, flag, key, SIMPLE_TRANSITION); + return CopyReplaceDescriptors( + map, new_descriptors, flag, descriptor->GetKey(), SIMPLE_TRANSITION); } -MaybeObject* Map::CopyInsertDescriptor(Descriptor* descriptor, - TransitionFlag flag) { - DescriptorArray* old_descriptors = instance_descriptors(); +Handle<Map> Map::CopyInsertDescriptor(Handle<Map> map, + Descriptor* descriptor, + TransitionFlag flag) { + Handle<DescriptorArray> old_descriptors(map->instance_descriptors()); // Ensure the key is unique. - MaybeObject* maybe_result = descriptor->KeyToUniqueName(); - if (maybe_result->IsFailure()) return maybe_result; + descriptor->KeyToUniqueName(); // We replace the key if it is already present. - int index = old_descriptors->SearchWithCache(descriptor->GetKey(), this); + int index = old_descriptors->SearchWithCache(*descriptor->GetKey(), *map); if (index != DescriptorArray::kNotFound) { - return CopyReplaceDescriptor(old_descriptors, descriptor, index, flag); + return CopyReplaceDescriptor(map, old_descriptors, descriptor, index, flag); } - return CopyAddDescriptor(descriptor, flag); + return CopyAddDescriptor(map, descriptor, flag); } -Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes( +Handle<DescriptorArray> DescriptorArray::CopyUpTo( Handle<DescriptorArray> desc, int enumeration_index, - PropertyAttributes attributes) { - CALL_HEAP_FUNCTION(desc->GetIsolate(), - desc->CopyUpToAddAttributes(enumeration_index, attributes), - DescriptorArray); + int slack) { + return DescriptorArray::CopyUpToAddAttributes( + desc, enumeration_index, NONE, slack); } -MaybeObject* DescriptorArray::CopyUpToAddAttributes( - int enumeration_index, PropertyAttributes attributes) { - if (enumeration_index == 0) return GetHeap()->empty_descriptor_array(); +Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes( + Handle<DescriptorArray> desc, + int enumeration_index, + PropertyAttributes attributes, + int slack) { + if (enumeration_index + slack == 0) { + return desc->GetIsolate()->factory()->empty_descriptor_array(); + } int size = enumeration_index; - DescriptorArray* descriptors; - MaybeObject* maybe_descriptors = Allocate(GetIsolate(), size); - if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors; - DescriptorArray::WhitenessWitness witness(descriptors); + Handle<DescriptorArray> descriptors = + DescriptorArray::Allocate(desc->GetIsolate(), size, slack); + DescriptorArray::WhitenessWitness witness(*descriptors); if (attributes != NONE) { for (int i = 0; i < size; ++i) { - Object* value = GetValue(i); - PropertyDetails details = GetDetails(i); + Object* value = desc->GetValue(i); + PropertyDetails details = desc->GetDetails(i); int mask = DONT_DELETE | DONT_ENUM; // READ_ONLY is an invalid attribute for JS setters/getters. 
if (details.type() != CALLBACKS || !value->IsAccessorPair()) { @@ -7090,59 +7646,44 @@ MaybeObject* DescriptorArray::CopyUpToAddAttributes( } details = details.CopyAddAttributes( static_cast<PropertyAttributes>(attributes & mask)); - Descriptor desc(GetKey(i), value, details); - descriptors->Set(i, &desc, witness); + Descriptor inner_desc(handle(desc->GetKey(i)), + handle(value, desc->GetIsolate()), + details); + descriptors->Set(i, &inner_desc, witness); } } else { for (int i = 0; i < size; ++i) { - descriptors->CopyFrom(i, this, i, witness); + descriptors->CopyFrom(i, *desc, witness); } } - if (number_of_descriptors() != enumeration_index) descriptors->Sort(); + if (desc->number_of_descriptors() != enumeration_index) descriptors->Sort(); return descriptors; } -MaybeObject* Map::CopyReplaceDescriptor(DescriptorArray* descriptors, - Descriptor* descriptor, - int insertion_index, - TransitionFlag flag) { +Handle<Map> Map::CopyReplaceDescriptor(Handle<Map> map, + Handle<DescriptorArray> descriptors, + Descriptor* descriptor, + int insertion_index, + TransitionFlag flag) { // Ensure the key is unique. - MaybeObject* maybe_failure = descriptor->KeyToUniqueName(); - if (maybe_failure->IsFailure()) return maybe_failure; - - Name* key = descriptor->GetKey(); - ASSERT(key == descriptors->GetKey(insertion_index)); - - int new_size = NumberOfOwnDescriptors(); - ASSERT(0 <= insertion_index && insertion_index < new_size); - - ASSERT_LT(insertion_index, new_size); + descriptor->KeyToUniqueName(); - DescriptorArray* new_descriptors; - MaybeObject* maybe_descriptors = - DescriptorArray::Allocate(GetIsolate(), new_size); - if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors; - DescriptorArray::WhitenessWitness witness(new_descriptors); + Handle<Name> key = descriptor->GetKey(); + ASSERT(*key == descriptors->GetKey(insertion_index)); - for (int i = 0; i < new_size; ++i) { - if (i == insertion_index) { - new_descriptors->Set(i, descriptor, witness); - } else { - new_descriptors->CopyFrom(i, descriptors, i, witness); - } - } + Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo( + descriptors, map->NumberOfOwnDescriptors()); - // Re-sort if descriptors were removed. - if (new_size != descriptors->length()) new_descriptors->Sort(); + new_descriptors->Replace(insertion_index, descriptor); SimpleTransitionFlag simple_flag = (insertion_index == descriptors->number_of_descriptors() - 1) ? SIMPLE_TRANSITION : FULL_TRANSITION; - return CopyReplaceDescriptors(new_descriptors, flag, key, simple_flag); + return CopyReplaceDescriptors(map, new_descriptors, flag, key, simple_flag); } @@ -7150,23 +7691,16 @@ void Map::UpdateCodeCache(Handle<Map> map, Handle<Name> name, Handle<Code> code) { Isolate* isolate = map->GetIsolate(); - CALL_HEAP_FUNCTION_VOID(isolate, - map->UpdateCodeCache(*name, *code)); -} - - -MaybeObject* Map::UpdateCodeCache(Name* name, Code* code) { + HandleScope scope(isolate); // Allocate the code cache if not present. - if (code_cache()->IsFixedArray()) { - Object* result; - { MaybeObject* maybe_result = GetHeap()->AllocateCodeCache(); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - set_code_cache(result); + if (map->code_cache()->IsFixedArray()) { + Handle<Object> result = isolate->factory()->NewCodeCache(); + map->set_code_cache(*result); } // Update the code cache. 
- return CodeCache::cast(code_cache())->Update(name, code); + Handle<CodeCache> code_cache(CodeCache::cast(map->code_cache()), isolate); + CodeCache::Update(code_cache, name, code); } @@ -7197,74 +7731,92 @@ void Map::RemoveFromCodeCache(Name* name, Code* code, int index) { } -// An iterator over all map transitions in an descriptor array, reusing the map -// field of the contens array while it is running. +// An iterator over all map transitions in an descriptor array, reusing the +// constructor field of the map while it is running. Negative values in +// the constructor field indicate an active map transition iteration. The +// original constructor is restored after iterating over all entries. class IntrusiveMapTransitionIterator { public: - explicit IntrusiveMapTransitionIterator(TransitionArray* transition_array) - : transition_array_(transition_array) { } + IntrusiveMapTransitionIterator( + Map* map, TransitionArray* transition_array, Object* constructor) + : map_(map), + transition_array_(transition_array), + constructor_(constructor) { } - void Start() { - ASSERT(!IsIterating()); - *TransitionArrayHeader() = Smi::FromInt(0); + void StartIfNotStarted() { + ASSERT(!(*IteratorField())->IsSmi() || IsIterating()); + if (!(*IteratorField())->IsSmi()) { + ASSERT(*IteratorField() == constructor_); + *IteratorField() = Smi::FromInt(-1); + } } bool IsIterating() { - return (*TransitionArrayHeader())->IsSmi(); + return (*IteratorField())->IsSmi() && + Smi::cast(*IteratorField())->value() < 0; } Map* Next() { ASSERT(IsIterating()); - int index = Smi::cast(*TransitionArrayHeader())->value(); + int value = Smi::cast(*IteratorField())->value(); + int index = -value - 1; int number_of_transitions = transition_array_->number_of_transitions(); while (index < number_of_transitions) { - *TransitionArrayHeader() = Smi::FromInt(index + 1); + *IteratorField() = Smi::FromInt(value - 1); return transition_array_->GetTarget(index); } - *TransitionArrayHeader() = transition_array_->GetHeap()->fixed_array_map(); + *IteratorField() = constructor_; return NULL; } private: - Object** TransitionArrayHeader() { - return HeapObject::RawField(transition_array_, TransitionArray::kMapOffset); + Object** IteratorField() { + return HeapObject::RawField(map_, Map::kConstructorOffset); } + Map* map_; TransitionArray* transition_array_; + Object* constructor_; }; -// An iterator over all prototype transitions, reusing the map field of the -// underlying array while it is running. +// An iterator over all prototype transitions, reusing the constructor field +// of the map while it is running. Positive values in the constructor field +// indicate an active prototype transition iteration. The original constructor +// is restored after iterating over all entries. 
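The rewritten iterators above keep their progress in the map's constructor slot: a negative small integer means a map-transition walk positioned at index -value-1, a non-negative one means a prototype-transition walk at index value, and anything that is not a small integer is the untouched constructor, which is written back once iteration finishes. A self-contained sketch of that encoding; IterSlot and the helper names are ours, standing in for the Smi tagging:

    #include <cassert>

    // Toy stand-in for the constructor slot: either the real constructor
    // pointer or an iteration index encoded as an integer.
    struct IterSlot {
      bool is_index = false;      // plays the role of Object::IsSmi()
      int value = 0;              // negative: map transitions, >= 0: proto transitions
      void* constructor = nullptr;
    };

    // Start a map-transition walk: indices are stored as -1, -2, -3, ...
    void StartMapTransitionIteration(IterSlot* slot) {
      if (!slot->is_index) { slot->is_index = true; slot->value = -1; }
    }

    // Advance and decode the current index, mirroring "index = -value - 1".
    int NextMapTransitionIndex(IterSlot* slot) {
      assert(slot->is_index && slot->value < 0);
      int index = -slot->value - 1;
      slot->value -= 1;           // move to the next entry
      return index;
    }

    // When iteration is done, the original constructor is restored.
    void FinishIteration(IterSlot* slot, void* constructor) {
      slot->is_index = false;
      slot->constructor = constructor;
    }
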
class IntrusivePrototypeTransitionIterator { public: - explicit IntrusivePrototypeTransitionIterator(HeapObject* proto_trans) - : proto_trans_(proto_trans) { } + IntrusivePrototypeTransitionIterator( + Map* map, HeapObject* proto_trans, Object* constructor) + : map_(map), proto_trans_(proto_trans), constructor_(constructor) { } - void Start() { - ASSERT(!IsIterating()); - *Header() = Smi::FromInt(0); + void StartIfNotStarted() { + if (!(*IteratorField())->IsSmi()) { + ASSERT(*IteratorField() == constructor_); + *IteratorField() = Smi::FromInt(0); + } } bool IsIterating() { - return (*Header())->IsSmi(); + return (*IteratorField())->IsSmi() && + Smi::cast(*IteratorField())->value() >= 0; } Map* Next() { ASSERT(IsIterating()); - int transitionNumber = Smi::cast(*Header())->value(); + int transitionNumber = Smi::cast(*IteratorField())->value(); if (transitionNumber < NumberOfTransitions()) { - *Header() = Smi::FromInt(transitionNumber + 1); + *IteratorField() = Smi::FromInt(transitionNumber + 1); return GetTransition(transitionNumber); } - *Header() = proto_trans_->GetHeap()->fixed_array_map(); + *IteratorField() = constructor_; return NULL; } private: - Object** Header() { - return HeapObject::RawField(proto_trans_, FixedArray::kMapOffset); + Object** IteratorField() { + return HeapObject::RawField(map_, Map::kConstructorOffset); } int NumberOfTransitions() { @@ -7284,29 +7836,33 @@ class IntrusivePrototypeTransitionIterator { transitionNumber * Map::kProtoTransitionElementsPerEntry; } + Map* map_; HeapObject* proto_trans_; + Object* constructor_; }; // To traverse the transition tree iteratively, we have to store two kinds of // information in a map: The parent map in the traversal and which children of a // node have already been visited. To do this without additional memory, we -// temporarily reuse two maps with known values: +// temporarily reuse two fields with known values: // // (1) The map of the map temporarily holds the parent, and is restored to the // meta map afterwards. // // (2) The info which children have already been visited depends on which part -// of the map we currently iterate: +// of the map we currently iterate. We use the constructor field of the +// map to store the current index. We can do that because the constructor +// is the same for all involved maps. // // (a) If we currently follow normal map transitions, we temporarily store -// the current index in the map of the FixedArray of the desciptor -// array's contents, and restore it to the fixed array map afterwards. -// Note that a single descriptor can have 0, 1, or 2 transitions. +// the current index in the constructor field, and restore it to the +// original constructor afterwards. Note that a single descriptor can +// have 0, 1, or 2 transitions. // // (b) If we currently follow prototype transitions, we temporarily store -// the current index in the map of the FixedArray holding the prototype -// transitions, and restore it to the fixed array map afterwards. +// the current index in the constructor field, and restore it to the +// original constructor afterwards. // // Note that the child iterator is just a concatenation of two iterators: One // iterating over map transitions and one iterating over prototype transisitons. @@ -7323,38 +7879,29 @@ class TraversableMap : public Map { return old_parent; } - // Start iterating over this map's children, possibly destroying a FixedArray - // map (see explanation above). 
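The block comment above explains how the transition tree is traversed in post-order without growing the C++ stack: each map temporarily records its parent and its child-iteration progress in fields it already owns. The same control flow with explicit scratch members instead of reused fields (TNode and TraversePostorder are illustrative):

    #include <cstddef>
    #include <vector>

    struct TNode {
      std::vector<TNode*> children;
      TNode* parent_scratch = nullptr;  // plays the role of the reused map slot
      size_t next_child = 0;            // plays the role of the reused constructor
    };

    // Visit every node after all of its children, using only the scratch
    // fields for bookkeeping (no recursion, no explicit stack).
    void TraversePostorder(TNode* root, void (*visit)(TNode*)) {
      TNode* current = root;
      while (true) {
        if (current->next_child < current->children.size()) {
          TNode* child = current->children[current->next_child++];
          child->parent_scratch = current;   // remember the way back
          current = child;                   // descend
        } else {
          current->next_child = 0;           // restore the scratch state
          TNode* parent = current->parent_scratch;
          current->parent_scratch = nullptr;
          visit(current);                    // children were visited first
          if (parent == nullptr) return;     // back at the root: done
          current = parent;                  // ascend
        }
      }
    }
    // Usage: for a root whose children are b and c, the visit order is b, c, root.
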
- void ChildIteratorStart() { - if (HasTransitionArray()) { - if (HasPrototypeTransitions()) { - IntrusivePrototypeTransitionIterator(GetPrototypeTransitions()).Start(); - } - - IntrusiveMapTransitionIterator(transitions()).Start(); - } - } - // If we have an unvisited child map, return that one and advance. If we have - // none, return NULL and reset any destroyed FixedArray maps. - TraversableMap* ChildIteratorNext() { - TransitionArray* transition_array = unchecked_transition_array(); - if (!transition_array->map()->IsSmi() && - !transition_array->IsTransitionArray()) { - return NULL; - } + // none, return NULL and restore the overwritten constructor field. + TraversableMap* ChildIteratorNext(Object* constructor) { + if (!HasTransitionArray()) return NULL; + TransitionArray* transition_array = transitions(); if (transition_array->HasPrototypeTransitions()) { HeapObject* proto_transitions = - transition_array->UncheckedPrototypeTransitions(); - IntrusivePrototypeTransitionIterator proto_iterator(proto_transitions); + transition_array->GetPrototypeTransitions(); + IntrusivePrototypeTransitionIterator proto_iterator(this, + proto_transitions, + constructor); + proto_iterator.StartIfNotStarted(); if (proto_iterator.IsIterating()) { Map* next = proto_iterator.Next(); if (next != NULL) return static_cast<TraversableMap*>(next); } } - IntrusiveMapTransitionIterator transition_iterator(transition_array); + IntrusiveMapTransitionIterator transition_iterator(this, + transition_array, + constructor); + transition_iterator.StartIfNotStarted(); if (transition_iterator.IsIterating()) { Map* next = transition_iterator.Next(); if (next != NULL) return static_cast<TraversableMap*>(next); @@ -7368,12 +7915,16 @@ class TraversableMap : public Map { // Traverse the transition tree in postorder without using the C++ stack by // doing pointer reversal. void Map::TraverseTransitionTree(TraverseCallback callback, void* data) { + // Make sure that we do not allocate in the callback. + DisallowHeapAllocation no_allocation; + TraversableMap* current = static_cast<TraversableMap*>(this); - current->ChildIteratorStart(); + // Get the root constructor here to restore it later when finished iterating + // over maps. + Object* root_constructor = constructor(); while (true) { - TraversableMap* child = current->ChildIteratorNext(); + TraversableMap* child = current->ChildIteratorNext(root_constructor); if (child != NULL) { - child->ChildIteratorStart(); child->SetParent(current); current = child; } else { @@ -7386,30 +7937,29 @@ void Map::TraverseTransitionTree(TraverseCallback callback, void* data) { } -MaybeObject* CodeCache::Update(Name* name, Code* code) { +void CodeCache::Update( + Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) { // The number of monomorphic stubs for normal load/store/call IC's can grow to // a large number and therefore they need to go into a hash table. They are // used to load global properties from cells. if (code->type() == Code::NORMAL) { // Make sure that a hash table is allocated for the normal load code cache. 
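// Illustrative sketch, not from the V8 sources: the shape of the two-tier
// cache being updated here -- a small flat structure that is scanned linearly
// for most code kinds, plus a hash-table tier that is only allocated lazily
// for the one kind that can grow large. Toy types and names only.
#include <cassert>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>

class ToyCodeCache {
 public:
  void Update(const std::string& name, int code, bool is_normal) {
    if (is_normal) {
      // Lazily allocate the hash-table tier on first use.
      if (normal_cache_ == nullptr) {
        normal_cache_.reset(new std::unordered_map<std::string, int>());
      }
      (*normal_cache_)[name] = code;
    } else {
      default_cache_.push_back({name, code});   // linear tier, scanned on lookup
    }
  }

  int Lookup(const std::string& name, bool is_normal) const {
    if (is_normal) {
      if (normal_cache_ == nullptr) return -1;
      auto it = normal_cache_->find(name);
      return it == normal_cache_->end() ? -1 : it->second;
    }
    for (const auto& entry : default_cache_) {  // linear scan, fine while small
      if (entry.first == name) return entry.second;
    }
    return -1;
  }

 private:
  std::vector<std::pair<std::string, int>> default_cache_;
  std::unique_ptr<std::unordered_map<std::string, int>> normal_cache_;  // lazily built
};

int main() {
  ToyCodeCache cache;
  cache.Update("load_x", 1, /*is_normal=*/true);
  cache.Update("keyed_store", 2, /*is_normal=*/false);
  assert(cache.Lookup("load_x", true) == 1);
  assert(cache.Lookup("keyed_store", false) == 2);
  assert(cache.Lookup("missing", true) == -1);
  return 0;
}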
- if (normal_type_cache()->IsUndefined()) { - Object* result; - { MaybeObject* maybe_result = - CodeCacheHashTable::Allocate(GetHeap(), - CodeCacheHashTable::kInitialSize); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - set_normal_type_cache(result); + if (code_cache->normal_type_cache()->IsUndefined()) { + Handle<Object> result = + CodeCacheHashTable::New(code_cache->GetIsolate(), + CodeCacheHashTable::kInitialSize); + code_cache->set_normal_type_cache(*result); } - return UpdateNormalTypeCache(name, code); + UpdateNormalTypeCache(code_cache, name, code); } else { - ASSERT(default_cache()->IsFixedArray()); - return UpdateDefaultCache(name, code); + ASSERT(code_cache->default_cache()->IsFixedArray()); + UpdateDefaultCache(code_cache, name, code); } } -MaybeObject* CodeCache::UpdateDefaultCache(Name* name, Code* code) { +void CodeCache::UpdateDefaultCache( + Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) { // When updating the default code cache we disregard the type encoded in the // flags. This allows call constant stubs to overwrite call field // stubs, etc. @@ -7417,37 +7967,40 @@ MaybeObject* CodeCache::UpdateDefaultCache(Name* name, Code* code) { // First check whether we can update existing code cache without // extending it. - FixedArray* cache = default_cache(); + Handle<FixedArray> cache = handle(code_cache->default_cache()); int length = cache->length(); - int deleted_index = -1; - for (int i = 0; i < length; i += kCodeCacheEntrySize) { - Object* key = cache->get(i); - if (key->IsNull()) { - if (deleted_index < 0) deleted_index = i; - continue; - } - if (key->IsUndefined()) { - if (deleted_index >= 0) i = deleted_index; - cache->set(i + kCodeCacheEntryNameOffset, name); - cache->set(i + kCodeCacheEntryCodeOffset, code); - return this; - } - if (name->Equals(Name::cast(key))) { - Code::Flags found = - Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags(); - if (Code::RemoveTypeFromFlags(found) == flags) { - cache->set(i + kCodeCacheEntryCodeOffset, code); - return this; + { + DisallowHeapAllocation no_alloc; + int deleted_index = -1; + for (int i = 0; i < length; i += kCodeCacheEntrySize) { + Object* key = cache->get(i); + if (key->IsNull()) { + if (deleted_index < 0) deleted_index = i; + continue; + } + if (key->IsUndefined()) { + if (deleted_index >= 0) i = deleted_index; + cache->set(i + kCodeCacheEntryNameOffset, *name); + cache->set(i + kCodeCacheEntryCodeOffset, *code); + return; + } + if (name->Equals(Name::cast(key))) { + Code::Flags found = + Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags(); + if (Code::RemoveTypeFromFlags(found) == flags) { + cache->set(i + kCodeCacheEntryCodeOffset, *code); + return; + } } } - } - // Reached the end of the code cache. If there were deleted - // elements, reuse the space for the first of them. - if (deleted_index >= 0) { - cache->set(deleted_index + kCodeCacheEntryNameOffset, name); - cache->set(deleted_index + kCodeCacheEntryCodeOffset, code); - return this; + // Reached the end of the code cache. If there were deleted + // elements, reuse the space for the first of them. + if (deleted_index >= 0) { + cache->set(deleted_index + kCodeCacheEntryNameOffset, *name); + cache->set(deleted_index + kCodeCacheEntryCodeOffset, *code); + return; + } } // Extend the code cache with some new entries (at least one). 
Must be a @@ -7455,29 +8008,22 @@ MaybeObject* CodeCache::UpdateDefaultCache(Name* name, Code* code) { int new_length = length + ((length >> 1)) + kCodeCacheEntrySize; new_length = new_length - new_length % kCodeCacheEntrySize; ASSERT((new_length % kCodeCacheEntrySize) == 0); - Object* result; - { MaybeObject* maybe_result = cache->CopySize(new_length); - if (!maybe_result->ToObject(&result)) return maybe_result; - } + cache = FixedArray::CopySize(cache, new_length); // Add the (name, code) pair to the new cache. - cache = FixedArray::cast(result); - cache->set(length + kCodeCacheEntryNameOffset, name); - cache->set(length + kCodeCacheEntryCodeOffset, code); - set_default_cache(cache); - return this; + cache->set(length + kCodeCacheEntryNameOffset, *name); + cache->set(length + kCodeCacheEntryCodeOffset, *code); + code_cache->set_default_cache(*cache); } -MaybeObject* CodeCache::UpdateNormalTypeCache(Name* name, Code* code) { +void CodeCache::UpdateNormalTypeCache( + Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) { // Adding a new entry can cause a new cache to be allocated. - CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache()); - Object* new_cache; - { MaybeObject* maybe_new_cache = cache->Put(name, code); - if (!maybe_new_cache->ToObject(&new_cache)) return maybe_new_cache; - } - set_normal_type_cache(new_cache); - return this; + Handle<CodeCacheHashTable> cache( + CodeCacheHashTable::cast(code_cache->normal_type_cache())); + Handle<Object> new_cache = CodeCacheHashTable::Put(cache, name, code); + code_cache->set_normal_type_cache(*new_cache); } @@ -7561,14 +8107,13 @@ void CodeCache::RemoveByIndex(Object* name, Code* code, int index) { // lookup not to create a new entry. class CodeCacheHashTableKey : public HashTableKey { public: - CodeCacheHashTableKey(Name* name, Code::Flags flags) - : name_(name), flags_(flags), code_(NULL) { } + CodeCacheHashTableKey(Handle<Name> name, Code::Flags flags) + : name_(name), flags_(flags), code_() { } - CodeCacheHashTableKey(Name* name, Code* code) + CodeCacheHashTableKey(Handle<Name> name, Handle<Code> code) : name_(name), flags_(code->flags()), code_(code) { } - - bool IsMatch(Object* other) { + bool IsMatch(Object* other) V8_OVERRIDE { if (!other->IsFixedArray()) return false; FixedArray* pair = FixedArray::cast(other); Name* name = Name::cast(pair->get(0)); @@ -7583,68 +8128,59 @@ class CodeCacheHashTableKey : public HashTableKey { return name->Hash() ^ flags; } - uint32_t Hash() { return NameFlagsHashHelper(name_, flags_); } + uint32_t Hash() V8_OVERRIDE { return NameFlagsHashHelper(*name_, flags_); } - uint32_t HashForObject(Object* obj) { + uint32_t HashForObject(Object* obj) V8_OVERRIDE { FixedArray* pair = FixedArray::cast(obj); Name* name = Name::cast(pair->get(0)); Code* code = Code::cast(pair->get(1)); return NameFlagsHashHelper(name, code->flags()); } - MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) { - ASSERT(code_ != NULL); - Object* obj; - { MaybeObject* maybe_obj = heap->AllocateFixedArray(2); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - FixedArray* pair = FixedArray::cast(obj); - pair->set(0, name_); - pair->set(1, code_); + MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { + Handle<Code> code = code_.ToHandleChecked(); + Handle<FixedArray> pair = isolate->factory()->NewFixedArray(2); + pair->set(0, *name_); + pair->set(1, *code); return pair; } private: - Name* name_; + Handle<Name> name_; Code::Flags flags_; // TODO(jkummerow): We should be able to get 
by without this. - Code* code_; + MaybeHandle<Code> code_; }; Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) { - CodeCacheHashTableKey key(name, flags); + DisallowHeapAllocation no_alloc; + CodeCacheHashTableKey key(handle(name), flags); int entry = FindEntry(&key); if (entry == kNotFound) return GetHeap()->undefined_value(); return get(EntryToIndex(entry) + 1); } -MaybeObject* CodeCacheHashTable::Put(Name* name, Code* code) { +Handle<CodeCacheHashTable> CodeCacheHashTable::Put( + Handle<CodeCacheHashTable> cache, Handle<Name> name, Handle<Code> code) { CodeCacheHashTableKey key(name, code); - Object* obj; - { MaybeObject* maybe_obj = EnsureCapacity(1, &key); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - // Don't use |this|, as the table might have grown. - CodeCacheHashTable* cache = reinterpret_cast<CodeCacheHashTable*>(obj); + Handle<CodeCacheHashTable> new_cache = EnsureCapacity(cache, 1, &key); - int entry = cache->FindInsertionEntry(key.Hash()); - Object* k; - { MaybeObject* maybe_k = key.AsObject(GetHeap()); - if (!maybe_k->ToObject(&k)) return maybe_k; - } + int entry = new_cache->FindInsertionEntry(key.Hash()); + Handle<Object> k = key.AsHandle(cache->GetIsolate()); - cache->set(EntryToIndex(entry), k); - cache->set(EntryToIndex(entry) + 1, code); - cache->ElementAdded(); - return cache; + new_cache->set(EntryToIndex(entry), *k); + new_cache->set(EntryToIndex(entry) + 1, *code); + new_cache->ElementAdded(); + return new_cache; } int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) { - CodeCacheHashTableKey key(name, flags); + DisallowHeapAllocation no_alloc; + CodeCacheHashTableKey key(handle(name), flags); int entry = FindEntry(&key); return (entry == kNotFound) ? -1 : entry; } @@ -7659,41 +8195,27 @@ void CodeCacheHashTable::RemoveByIndex(int index) { } -void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> cache, +void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> code_cache, MapHandleList* maps, Code::Flags flags, Handle<Code> code) { - Isolate* isolate = cache->GetIsolate(); - CALL_HEAP_FUNCTION_VOID(isolate, cache->Update(maps, flags, *code)); -} - - -MaybeObject* PolymorphicCodeCache::Update(MapHandleList* maps, - Code::Flags flags, - Code* code) { - // Initialize cache if necessary. - if (cache()->IsUndefined()) { - Object* result; - { MaybeObject* maybe_result = - PolymorphicCodeCacheHashTable::Allocate( - GetHeap(), - PolymorphicCodeCacheHashTable::kInitialSize); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - set_cache(result); + Isolate* isolate = code_cache->GetIsolate(); + if (code_cache->cache()->IsUndefined()) { + Handle<PolymorphicCodeCacheHashTable> result = + PolymorphicCodeCacheHashTable::New( + isolate, + PolymorphicCodeCacheHashTable::kInitialSize); + code_cache->set_cache(*result); } else { // This entry shouldn't be contained in the cache yet. 
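// Illustrative sketch, not from the V8 sources: the update strategy of the
// flat default cache earlier in this hunk -- scan the entries, remember the
// first deleted slot, overwrite a matching key, reuse a deleted slot when the
// free tail is reached, and otherwise grow by roughly 1.5x in whole entries.
// Toy layout: even indices hold keys, odd indices hold codes, an empty key
// means "free", and "<deleted>" marks a removed entry.
#include <cassert>
#include <string>
#include <vector>

static const int kEntrySize = 2;
static const char kDeleted[] = "<deleted>";

static void UpdateFlatCache(std::vector<std::string>* cache,
                            const std::string& name, const std::string& code) {
  int deleted_index = -1;
  int length = static_cast<int>(cache->size());
  for (int i = 0; i < length; i += kEntrySize) {
    const std::string& key = (*cache)[i];
    if (key == kDeleted) {
      if (deleted_index < 0) deleted_index = i;    // remember the first deleted slot
      continue;
    }
    if (key.empty()) {                             // free tail reached: insert here, or
      if (deleted_index >= 0) i = deleted_index;   // at a deleted slot seen earlier
      (*cache)[i] = name;
      (*cache)[i + 1] = code;
      return;
    }
    if (key == name) {                             // same key: overwrite the code in place
      (*cache)[i + 1] = code;
      return;
    }
  }
  if (deleted_index >= 0) {                        // cache full, but a deleted slot exists
    (*cache)[deleted_index] = name;
    (*cache)[deleted_index + 1] = code;
    return;
  }
  int new_length = length + (length >> 1) + kEntrySize;  // grow by ~1.5x, at least one entry
  new_length -= new_length % kEntrySize;
  cache->resize(new_length);                       // new slots are empty (free) strings
  (*cache)[length] = name;
  (*cache)[length + 1] = code;
}

int main() {
  std::vector<std::string> cache(2 * kEntrySize);  // two empty entries
  UpdateFlatCache(&cache, "a", "code_a");
  UpdateFlatCache(&cache, "b", "code_b");
  UpdateFlatCache(&cache, "a", "code_a2");         // overwrites in place
  assert(cache[1] == "code_a2");
  UpdateFlatCache(&cache, "c", "code_c");          // cache is full, so it grows
  assert(cache.size() == 8 && cache[5] == "code_c");
  return 0;
}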
- ASSERT(PolymorphicCodeCacheHashTable::cast(cache()) + ASSERT(PolymorphicCodeCacheHashTable::cast(code_cache->cache()) ->Lookup(maps, flags)->IsUndefined()); } - PolymorphicCodeCacheHashTable* hash_table = - PolymorphicCodeCacheHashTable::cast(cache()); - Object* new_cache; - { MaybeObject* maybe_new_cache = hash_table->Put(maps, flags, code); - if (!maybe_new_cache->ToObject(&new_cache)) return maybe_new_cache; - } - set_cache(new_cache); - return this; + Handle<PolymorphicCodeCacheHashTable> hash_table = + handle(PolymorphicCodeCacheHashTable::cast(code_cache->cache())); + Handle<PolymorphicCodeCacheHashTable> new_cache = + PolymorphicCodeCacheHashTable::Put(hash_table, maps, flags, code); + code_cache->set_cache(*new_cache); } @@ -7719,7 +8241,7 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey { : maps_(maps), code_flags_(code_flags) {} - bool IsMatch(Object* other) { + bool IsMatch(Object* other) V8_OVERRIDE { MapHandleList other_maps(kDefaultListAllocationSize); int other_flags; FromObject(other, &other_flags, &other_maps); @@ -7754,27 +8276,23 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey { return hash; } - uint32_t Hash() { + uint32_t Hash() V8_OVERRIDE { return MapsHashHelper(maps_, code_flags_); } - uint32_t HashForObject(Object* obj) { + uint32_t HashForObject(Object* obj) V8_OVERRIDE { MapHandleList other_maps(kDefaultListAllocationSize); int other_flags; FromObject(obj, &other_flags, &other_maps); return MapsHashHelper(&other_maps, other_flags); } - MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) { - Object* obj; + MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { // The maps in |maps_| must be copied to a newly allocated FixedArray, // both because the referenced MapList is short-lived, and because C++ // objects can't be stored in the heap anyway. 
- { MaybeObject* maybe_obj = - heap->AllocateUninitializedFixedArray(maps_->length() + 1); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - FixedArray* list = FixedArray::cast(obj); + Handle<FixedArray> list = + isolate->factory()->NewUninitializedFixedArray(maps_->length() + 1); list->set(0, Smi::FromInt(code_flags_)); for (int i = 0; i < maps_->length(); ++i) { list->set(i + 1, *maps_->at(i)); @@ -7802,30 +8320,28 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey { Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps, - int code_flags) { - PolymorphicCodeCacheHashTableKey key(maps, code_flags); + int code_kind) { + DisallowHeapAllocation no_alloc; + PolymorphicCodeCacheHashTableKey key(maps, code_kind); int entry = FindEntry(&key); if (entry == kNotFound) return GetHeap()->undefined_value(); return get(EntryToIndex(entry) + 1); } -MaybeObject* PolymorphicCodeCacheHashTable::Put(MapHandleList* maps, - int code_flags, - Code* code) { - PolymorphicCodeCacheHashTableKey key(maps, code_flags); - Object* obj; - { MaybeObject* maybe_obj = EnsureCapacity(1, &key); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - PolymorphicCodeCacheHashTable* cache = - reinterpret_cast<PolymorphicCodeCacheHashTable*>(obj); +Handle<PolymorphicCodeCacheHashTable> PolymorphicCodeCacheHashTable::Put( + Handle<PolymorphicCodeCacheHashTable> hash_table, + MapHandleList* maps, + int code_kind, + Handle<Code> code) { + PolymorphicCodeCacheHashTableKey key(maps, code_kind); + Handle<PolymorphicCodeCacheHashTable> cache = + EnsureCapacity(hash_table, 1, &key); int entry = cache->FindInsertionEntry(key.Hash()); - { MaybeObject* maybe_obj = key.AsObject(GetHeap()); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - cache->set(EntryToIndex(entry), obj); - cache->set(EntryToIndex(entry) + 1, code); + + Handle<Object> obj = key.AsHandle(hash_table->GetIsolate()); + cache->set(EntryToIndex(entry), *obj); + cache->set(EntryToIndex(entry) + 1, *code); cache->ElementAdded(); return cache; } @@ -7840,14 +8356,20 @@ void FixedArray::Shrink(int new_length) { } -MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) { +MaybeHandle<FixedArray> FixedArray::AddKeysFromArrayLike( + Handle<FixedArray> content, + Handle<JSObject> array) { + ASSERT(array->IsJSArray() || array->HasSloppyArgumentsElements()); ElementsAccessor* accessor = array->GetElementsAccessor(); - MaybeObject* maybe_result = - accessor->AddElementsToFixedArray(array, array, this); - FixedArray* result; - if (!maybe_result->To<FixedArray>(&result)) return maybe_result; + Handle<FixedArray> result; + ASSIGN_RETURN_ON_EXCEPTION( + array->GetIsolate(), result, + accessor->AddElementsToFixedArray(array, array, content), + FixedArray); + #ifdef ENABLE_SLOW_ASSERTS if (FLAG_enable_slow_asserts) { + DisallowHeapAllocation no_allocation; for (int i = 0; i < result->length(); i++) { Object* current = result->get(i); ASSERT(current->IsNumber() || current->IsName()); @@ -7858,14 +8380,22 @@ MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) { } -MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) { - ElementsAccessor* accessor = ElementsAccessor::ForArray(other); - MaybeObject* maybe_result = - accessor->AddElementsToFixedArray(NULL, NULL, this, other); - FixedArray* result; - if (!maybe_result->To(&result)) return maybe_result; +MaybeHandle<FixedArray> FixedArray::UnionOfKeys(Handle<FixedArray> first, + Handle<FixedArray> second) { + ElementsAccessor* accessor = ElementsAccessor::ForArray(second); + 
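// Illustrative sketch, not from the V8 sources: the core idea behind the
// Handle<T>/MaybeHandle<T> signatures this diff introduces in place of raw
// Object*/MaybeObject* results. A handle is one extra indirection through a
// slot that the collector owns; if a moving GC relocates the object and
// rewrites the slot, code holding the handle still reaches the live object.
// Purely a toy model, not the real V8 API.
#include <cassert>

struct ToyObject { int value; };

struct ToyHandle {
  ToyObject** location;                         // slot owned by the "collector"
  ToyObject* operator->() const { return *location; }
};

int main() {
  ToyObject original{41};
  ToyObject relocated{41};                      // pretend the GC copied the object here
  ToyObject* slot = &original;
  ToyHandle handle{&slot};

  assert(handle->value == 41);
  slot = &relocated;                            // "GC" moves the object and fixes the slot
  relocated.value = 42;
  assert(handle->value == 42);                  // the handle still sees the live object
  return 0;
}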
Handle<FixedArray> result; + ASSIGN_RETURN_ON_EXCEPTION( + first->GetIsolate(), result, + accessor->AddElementsToFixedArray( + Handle<Object>::null(), // receiver + Handle<JSObject>::null(), // holder + first, + Handle<FixedArrayBase>::cast(second)), + FixedArray); + #ifdef ENABLE_SLOW_ASSERTS if (FLAG_enable_slow_asserts) { + DisallowHeapAllocation no_allocation; for (int i = 0; i < result->length(); i++) { Object* current = result->get(i); ASSERT(current->IsNumber() || current->IsName()); @@ -7876,24 +8406,22 @@ MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) { } -MaybeObject* FixedArray::CopySize(int new_length, PretenureFlag pretenure) { - Heap* heap = GetHeap(); - if (new_length == 0) return heap->empty_fixed_array(); - Object* obj; - { MaybeObject* maybe_obj = heap->AllocateFixedArray(new_length, pretenure); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - FixedArray* result = FixedArray::cast(obj); +Handle<FixedArray> FixedArray::CopySize( + Handle<FixedArray> array, int new_length, PretenureFlag pretenure) { + Isolate* isolate = array->GetIsolate(); + if (new_length == 0) return isolate->factory()->empty_fixed_array(); + Handle<FixedArray> result = + isolate->factory()->NewFixedArray(new_length, pretenure); // Copy the content DisallowHeapAllocation no_gc; - int len = length(); + int len = array->length(); if (new_length < len) len = new_length; // We are taking the map from the old fixed array so the map is sure to // be an immortal immutable object. - result->set_map_no_write_barrier(map()); + result->set_map_no_write_barrier(array->map()); WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); for (int i = 0; i < len; i++) { - result->set(i, get(i), mode); + result->set(i, array->get(i), mode); } return result; } @@ -7919,21 +8447,20 @@ bool FixedArray::IsEqualTo(FixedArray* other) { #endif -MaybeObject* DescriptorArray::Allocate(Isolate* isolate, - int number_of_descriptors, - int slack) { - Heap* heap = isolate->heap(); +Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate, + int number_of_descriptors, + int slack) { + ASSERT(0 <= number_of_descriptors); + Factory* factory = isolate->factory(); // Do not use DescriptorArray::cast on incomplete object. int size = number_of_descriptors + slack; - if (size == 0) return heap->empty_descriptor_array(); - FixedArray* result; + if (size == 0) return factory->empty_descriptor_array(); // Allocate the array of keys. 
- MaybeObject* maybe_array = heap->AllocateFixedArray(LengthFor(size)); - if (!maybe_array->To(&result)) return maybe_array; + Handle<FixedArray> result = factory->NewFixedArray(LengthFor(size)); result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors)); result->set(kEnumCacheIndex, Smi::FromInt(0)); - return result; + return Handle<DescriptorArray>::cast(result); } @@ -7942,6 +8469,12 @@ void DescriptorArray::ClearEnumCache() { } +void DescriptorArray::Replace(int index, Descriptor* descriptor) { + descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index)); + Set(index, descriptor); +} + + void DescriptorArray::SetEnumCache(FixedArray* bridge_storage, FixedArray* new_cache, Object* new_index_cache) { @@ -7957,138 +8490,15 @@ void DescriptorArray::SetEnumCache(FixedArray* bridge_storage, } -void DescriptorArray::CopyFrom(int dst_index, +void DescriptorArray::CopyFrom(int index, DescriptorArray* src, - int src_index, const WhitenessWitness& witness) { - Object* value = src->GetValue(src_index); - PropertyDetails details = src->GetDetails(src_index); - Descriptor desc(src->GetKey(src_index), value, details); - Set(dst_index, &desc, witness); -} - - -Handle<DescriptorArray> DescriptorArray::Merge(Handle<DescriptorArray> desc, - int verbatim, - int valid, - int new_size, - int modify_index, - StoreMode store_mode, - Handle<DescriptorArray> other) { - CALL_HEAP_FUNCTION(desc->GetIsolate(), - desc->Merge(verbatim, valid, new_size, modify_index, - store_mode, *other), - DescriptorArray); -} - - -// Generalize the |other| descriptor array by merging it into the (at least -// partly) updated |this| descriptor array. -// The method merges two descriptor array in three parts. Both descriptor arrays -// are identical up to |verbatim|. They also overlap in keys up to |valid|. -// Between |verbatim| and |valid|, the resulting descriptor type as well as the -// representation are generalized from both |this| and |other|. Beyond |valid|, -// the descriptors are copied verbatim from |other| up to |new_size|. -// In case of incompatible types, the type and representation of |other| is -// used. -MaybeObject* DescriptorArray::Merge(int verbatim, - int valid, - int new_size, - int modify_index, - StoreMode store_mode, - DescriptorArray* other) { - ASSERT(verbatim <= valid); - ASSERT(valid <= new_size); - - DescriptorArray* result; - // Allocate a new descriptor array large enough to hold the required - // descriptors, with minimally the exact same size as this descriptor array. 
- MaybeObject* maybe_descriptors = DescriptorArray::Allocate( - GetIsolate(), new_size, - Max(new_size, other->number_of_descriptors()) - new_size); - if (!maybe_descriptors->To(&result)) return maybe_descriptors; - ASSERT(result->length() > length() || - result->NumberOfSlackDescriptors() > 0 || - result->number_of_descriptors() == other->number_of_descriptors()); - ASSERT(result->number_of_descriptors() == new_size); - - DescriptorArray::WhitenessWitness witness(result); - - int descriptor; - - // 0 -> |verbatim| - int current_offset = 0; - for (descriptor = 0; descriptor < verbatim; descriptor++) { - if (GetDetails(descriptor).type() == FIELD) current_offset++; - result->CopyFrom(descriptor, other, descriptor, witness); - } - - // |verbatim| -> |valid| - for (; descriptor < valid; descriptor++) { - Name* key = GetKey(descriptor); - PropertyDetails details = GetDetails(descriptor); - PropertyDetails other_details = other->GetDetails(descriptor); - - if (details.type() == FIELD || other_details.type() == FIELD || - (store_mode == FORCE_FIELD && descriptor == modify_index) || - (details.type() == CONSTANT && - other_details.type() == CONSTANT && - GetValue(descriptor) != other->GetValue(descriptor))) { - Representation representation = - details.representation().generalize(other_details.representation()); - FieldDescriptor d(key, - current_offset++, - other_details.attributes(), - representation); - result->Set(descriptor, &d, witness); - } else { - result->CopyFrom(descriptor, other, descriptor, witness); - } - } - - // |valid| -> |new_size| - for (; descriptor < new_size; descriptor++) { - PropertyDetails details = other->GetDetails(descriptor); - if (details.type() == FIELD || - (store_mode == FORCE_FIELD && descriptor == modify_index)) { - Name* key = other->GetKey(descriptor); - FieldDescriptor d(key, - current_offset++, - details.attributes(), - details.representation()); - result->Set(descriptor, &d, witness); - } else { - result->CopyFrom(descriptor, other, descriptor, witness); - } - } - - result->Sort(); - return result; -} - - -// Checks whether a merge of |other| into |this| would return a copy of |this|. 
-bool DescriptorArray::IsMoreGeneralThan(int verbatim, - int valid, - int new_size, - DescriptorArray* other) { - ASSERT(verbatim <= valid); - ASSERT(valid <= new_size); - if (valid != new_size) return false; - - for (int descriptor = verbatim; descriptor < valid; descriptor++) { - PropertyDetails details = GetDetails(descriptor); - PropertyDetails other_details = other->GetDetails(descriptor); - if (!other_details.representation().fits_into(details.representation())) { - return false; - } - if (details.type() == CONSTANT) { - if (other_details.type() != CONSTANT) return false; - if (GetValue(descriptor) != other->GetValue(descriptor)) return false; - } - } - - return true; + Object* value = src->GetValue(index); + PropertyDetails details = src->GetDetails(index); + Descriptor desc(handle(src->GetKey(index)), + handle(value, src->GetIsolate()), + details); + Set(index, &desc, witness); } @@ -8165,21 +8575,29 @@ Object* AccessorPair::GetComponent(AccessorComponent component) { } -MaybeObject* DeoptimizationInputData::Allocate(Isolate* isolate, - int deopt_entry_count, - PretenureFlag pretenure) { +Handle<DeoptimizationInputData> DeoptimizationInputData::New( + Isolate* isolate, + int deopt_entry_count, + PretenureFlag pretenure) { ASSERT(deopt_entry_count > 0); - return isolate->heap()->AllocateFixedArray(LengthFor(deopt_entry_count), - pretenure); + return Handle<DeoptimizationInputData>::cast( + isolate->factory()->NewFixedArray( + LengthFor(deopt_entry_count), pretenure)); } -MaybeObject* DeoptimizationOutputData::Allocate(Isolate* isolate, - int number_of_deopt_points, - PretenureFlag pretenure) { - if (number_of_deopt_points == 0) return isolate->heap()->empty_fixed_array(); - return isolate->heap()->AllocateFixedArray( - LengthOfFixedArray(number_of_deopt_points), pretenure); +Handle<DeoptimizationOutputData> DeoptimizationOutputData::New( + Isolate* isolate, + int number_of_deopt_points, + PretenureFlag pretenure) { + Handle<FixedArray> result; + if (number_of_deopt_points == 0) { + result = isolate->factory()->empty_fixed_array(); + } else { + result = isolate->factory()->NewFixedArray( + LengthOfFixedArray(number_of_deopt_points), pretenure); + } + return Handle<DeoptimizationOutputData>::cast(result); } @@ -8255,7 +8673,7 @@ String::FlatContent String::GetFlatContent() { } else { start = ExternalAsciiString::cast(string)->GetChars(); } - return FlatContent(Vector<const uint8_t>(start + offset, length)); + return FlatContent(start + offset, length); } else { ASSERT(shape.encoding_tag() == kTwoByteStringTag); const uc16* start; @@ -8264,7 +8682,7 @@ String::FlatContent String::GetFlatContent() { } else { start = ExternalTwoByteString::cast(string)->GetChars(); } - return FlatContent(Vector<const uc16>(start + offset, length)); + return FlatContent(start + offset, length); } } @@ -8460,34 +8878,47 @@ void FlatStringReader::PostGarbageCollection() { } -String* ConsStringIteratorOp::Operate(String* string, - unsigned* offset_out, - int32_t* type_out, - unsigned* length_out) { - ASSERT(string->IsConsString()); - ConsString* cons_string = ConsString::cast(string); - // Set up search data. +void ConsStringIteratorOp::Initialize(ConsString* cons_string, int offset) { + ASSERT(cons_string != NULL); root_ = cons_string; - consumed_ = *offset_out; - // Now search. - return Search(offset_out, type_out, length_out); + consumed_ = offset; + // Force stack blown condition to trigger restart. 
+ depth_ = 1; + maximum_depth_ = kStackSize + depth_; + ASSERT(StackBlown()); } -String* ConsStringIteratorOp::Search(unsigned* offset_out, - int32_t* type_out, - unsigned* length_out) { +String* ConsStringIteratorOp::Continue(int* offset_out) { + ASSERT(depth_ != 0); + ASSERT_EQ(0, *offset_out); + bool blew_stack = StackBlown(); + String* string = NULL; + // Get the next leaf if there is one. + if (!blew_stack) string = NextLeaf(&blew_stack); + // Restart search from root. + if (blew_stack) { + ASSERT(string == NULL); + string = Search(offset_out); + } + // Ensure future calls return null immediately. + if (string == NULL) Reset(NULL); + return string; +} + + +String* ConsStringIteratorOp::Search(int* offset_out) { ConsString* cons_string = root_; // Reset the stack, pushing the root string. depth_ = 1; maximum_depth_ = 1; frames_[0] = cons_string; - const unsigned consumed = consumed_; - unsigned offset = 0; + const int consumed = consumed_; + int offset = 0; while (true) { // Loop until the string is found which contains the target offset. String* string = cons_string->first(); - unsigned length = string->length(); + int length = string->length(); int32_t type; if (consumed < offset + length) { // Target offset is in the left branch. @@ -8498,7 +8929,7 @@ String* ConsStringIteratorOp::Search(unsigned* offset_out, PushLeft(cons_string); continue; } - // Tell the stack we're done decending. + // Tell the stack we're done descending. AdjustMaximumDepth(); } else { // Descend right. @@ -8510,7 +8941,6 @@ String* ConsStringIteratorOp::Search(unsigned* offset_out, if ((type & kStringRepresentationMask) == kConsStringTag) { cons_string = ConsString::cast(string); PushRight(cons_string); - // TODO(dcarney) Add back root optimization. continue; } // Need this to be updated for the current string. @@ -8518,11 +8948,11 @@ String* ConsStringIteratorOp::Search(unsigned* offset_out, // Account for the possibility of an empty right leaf. // This happens only if we have asked for an offset outside the string. if (length == 0) { - // Reset depth so future operations will return null immediately. - Reset(); + // Reset so future operations will return null immediately. + Reset(NULL); return NULL; } - // Tell the stack we're done decending. + // Tell the stack we're done descending. AdjustMaximumDepth(); // Pop stack so next iteration is in correct place. Pop(); @@ -8531,8 +8961,6 @@ String* ConsStringIteratorOp::Search(unsigned* offset_out, // Adjust return values and exit. consumed_ = offset + length; *offset_out = consumed - offset; - *type_out = type; - *length_out = length; return string; } UNREACHABLE(); @@ -8540,9 +8968,7 @@ String* ConsStringIteratorOp::Search(unsigned* offset_out, } -String* ConsStringIteratorOp::NextLeaf(bool* blew_stack, - int32_t* type_out, - unsigned* length_out) { +String* ConsStringIteratorOp::NextLeaf(bool* blew_stack) { while (true) { // Tree traversal complete. if (depth_ == 0) { @@ -8550,7 +8976,7 @@ String* ConsStringIteratorOp::NextLeaf(bool* blew_stack, return NULL; } // We've lost track of higher nodes. - if (maximum_depth_ - depth_ == kStackSize) { + if (StackBlown()) { *blew_stack = true; return NULL; } @@ -8561,16 +8987,13 @@ String* ConsStringIteratorOp::NextLeaf(bool* blew_stack, if ((type & kStringRepresentationMask) != kConsStringTag) { // Pop stack so next iteration is in correct place. Pop(); - unsigned length = static_cast<unsigned>(string->length()); + int length = string->length(); // Could be a flattened ConsString. 
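// Illustrative sketch, not from the V8 sources: the restart path that
// Search() above takes when the bounded iterator stack has been blown --
// descend from the root, going left when the target offset falls inside the
// left child and right otherwise, keeping a running count of the characters
// skipped. Toy cons nodes with hypothetical names.
#include <cassert>
#include <string>

struct ToyCons {
  std::string payload;           // leaf text; empty for interior nodes
  const ToyCons* first;
  const ToyCons* second;
  bool IsLeaf() const { return first == nullptr; }
  int length() const {
    return IsLeaf() ? static_cast<int>(payload.size())
                    : first->length() + second->length();
  }
};

// Walk from the root to the leaf containing character `consumed`.
static const ToyCons* SearchFromRoot(const ToyCons* root, int consumed,
                                     int* offset_in_leaf) {
  const ToyCons* node = root;
  int offset = 0;
  while (!node->IsLeaf()) {
    int left_length = node->first->length();
    if (consumed < offset + left_length) {
      node = node->first;        // target offset is inside the left child
    } else {
      offset += left_length;     // skip the left child entirely
      node = node->second;
    }
  }
  *offset_in_leaf = consumed - offset;
  return node;
}

int main() {
  ToyCons d{"de"}, c{"c"}, ab{"ab"};
  ToyCons cd{"", &c, &d};
  ToyCons root{"", &ab, &cd};    // spells "abcde"
  int offset_in_leaf = 0;
  const ToyCons* leaf = SearchFromRoot(&root, 3, &offset_in_leaf);
  assert(leaf == &d && offset_in_leaf == 0);   // character 3 is the first char of "de"
  leaf = SearchFromRoot(&root, 1, &offset_in_leaf);
  assert(leaf == &ab && offset_in_leaf == 1);  // character 1 is 'b' inside "ab"
  return 0;
}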
if (length == 0) continue; - *length_out = length; - *type_out = type; consumed_ += length; return string; } cons_string = ConsString::cast(string); - // TODO(dcarney) Add back root optimization. PushRight(cons_string); // Need to traverse all the way left. while (true) { @@ -8579,10 +9002,8 @@ String* ConsStringIteratorOp::NextLeaf(bool* blew_stack, type = string->map()->instance_type(); if ((type & kStringRepresentationMask) != kConsStringTag) { AdjustMaximumDepth(); - unsigned length = static_cast<unsigned>(string->length()); + int length = string->length(); ASSERT(length != 0); - *length_out = length; - *type_out = type; consumed_ += length; return string; } @@ -8721,6 +9142,64 @@ void String::WriteToFlat(String* src, } + +template <typename SourceChar> +static void CalculateLineEndsImpl(Isolate* isolate, + List<int>* line_ends, + Vector<const SourceChar> src, + bool include_ending_line) { + const int src_len = src.length(); + StringSearch<uint8_t, SourceChar> search(isolate, STATIC_ASCII_VECTOR("\n")); + + // Find and record line ends. + int position = 0; + while (position != -1 && position < src_len) { + position = search.Search(src, position); + if (position != -1) { + line_ends->Add(position); + position++; + } else if (include_ending_line) { + // Even if the last line misses a line end, it is counted. + line_ends->Add(src_len); + return; + } + } +} + + +Handle<FixedArray> String::CalculateLineEnds(Handle<String> src, + bool include_ending_line) { + src = Flatten(src); + // Rough estimate of line count based on a roughly estimated average + // length of (unpacked) code. + int line_count_estimate = src->length() >> 4; + List<int> line_ends(line_count_estimate); + Isolate* isolate = src->GetIsolate(); + { DisallowHeapAllocation no_allocation; // ensure vectors stay valid. + // Dispatch on type of strings. + String::FlatContent content = src->GetFlatContent(); + ASSERT(content.IsFlat()); + if (content.IsAscii()) { + CalculateLineEndsImpl(isolate, + &line_ends, + content.ToOneByteVector(), + include_ending_line); + } else { + CalculateLineEndsImpl(isolate, + &line_ends, + content.ToUC16Vector(), + include_ending_line); + } + } + int line_count = line_ends.length(); + Handle<FixedArray> array = isolate->factory()->NewFixedArray(line_count); + for (int i = 0; i < line_count; i++) { + array->set(i, Smi::FromInt(line_ends[i])); + } + return array; +} + + // Compares the contents of two strings by reading and comparing // int-sized blocks of characters. 
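// Illustrative sketch, not from the V8 sources: what the line-end calculation
// above computes -- the offset of every '\n', plus the end of the source when
// the last line has no trailing newline and the ending line is requested.
// std::string::find stands in for the StringSearch machinery.
#include <cassert>
#include <string>
#include <vector>

static std::vector<int> ToyCalculateLineEnds(const std::string& src,
                                             bool include_ending_line) {
  std::vector<int> line_ends;
  std::string::size_type position = 0;
  while (position < src.size()) {
    position = src.find('\n', position);
    if (position == std::string::npos) {
      // The last line misses its line end; count it anyway if requested.
      if (include_ending_line) line_ends.push_back(static_cast<int>(src.size()));
      break;
    }
    line_ends.push_back(static_cast<int>(position));
    ++position;
  }
  return line_ends;
}

int main() {
  std::vector<int> ends = ToyCalculateLineEnds("ab\ncd\nef", true);
  assert(ends.size() == 3 && ends[0] == 2 && ends[1] == 5 && ends[2] == 8);
  ends = ToyCalculateLineEnds("ab\ncd\n", true);
  assert(ends.size() == 2 && ends[1] == 5);   // trailing newline: no extra entry
  return 0;
}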
template <typename Char> @@ -8799,25 +9278,29 @@ class StringComparator { explicit inline State(ConsStringIteratorOp* op) : op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {} - inline void Init(String* string, unsigned len) { - op_->Reset(); - int32_t type = string->map()->instance_type(); - String::Visit(string, 0, *this, *op_, type, len); + inline void Init(String* string) { + ConsString* cons_string = String::VisitFlat(this, string); + op_->Reset(cons_string); + if (cons_string != NULL) { + int offset; + string = op_->Next(&offset); + String::VisitFlat(this, string, offset); + } } - inline void VisitOneByteString(const uint8_t* chars, unsigned length) { + inline void VisitOneByteString(const uint8_t* chars, int length) { is_one_byte_ = true; buffer8_ = chars; length_ = length; } - inline void VisitTwoByteString(const uint16_t* chars, unsigned length) { + inline void VisitTwoByteString(const uint16_t* chars, int length) { is_one_byte_ = false; buffer16_ = chars; length_ = length; } - void Advance(unsigned consumed) { + void Advance(int consumed) { ASSERT(consumed <= length_); // Still in buffer. if (length_ != consumed) { @@ -8830,18 +9313,16 @@ class StringComparator { return; } // Advance state. - ASSERT(op_->HasMore()); - int32_t type = 0; - unsigned length = 0; - String* next = op_->ContinueOperation(&type, &length); + int offset; + String* next = op_->Next(&offset); + ASSERT_EQ(0, offset); ASSERT(next != NULL); - ConsStringNullOp null_op; - String::Visit(next, 0, *this, null_op, type, length); + String::VisitFlat(this, next); } ConsStringIteratorOp* const op_; bool is_one_byte_; - unsigned length_; + int length_; union { const uint8_t* buffer8_; const uint16_t* buffer16_; @@ -8859,18 +9340,18 @@ class StringComparator { } template<typename Chars1, typename Chars2> - static inline bool Equals(State* state_1, State* state_2, unsigned to_check) { + static inline bool Equals(State* state_1, State* state_2, int to_check) { const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_); const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_); return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check); } - bool Equals(unsigned length, String* string_1, String* string_2) { - ASSERT(length != 0); - state_1_.Init(string_1, length); - state_2_.Init(string_2, length); + bool Equals(String* string_1, String* string_2) { + int length = string_1->length(); + state_1_.Init(string_1); + state_2_.Init(string_2); while (true) { - unsigned to_check = Min(state_1_.length_, state_2_.length_); + int to_check = Min(state_1_.length_, state_2_.length_); ASSERT(to_check > 0 && to_check <= length); bool is_equal; if (state_1_.is_one_byte_) { @@ -8904,6 +9385,7 @@ class StringComparator { bool String::SlowEquals(String* other) { + DisallowHeapAllocation no_gc; // Fast check: negative check with lengths. int len = length(); if (len != other->length()) return false; @@ -8933,14 +9415,9 @@ bool String::SlowEquals(String* other) { // before we try to flatten the strings. if (this->Get(0) != other->Get(0)) return false; - String* lhs = this->TryFlattenGetString(); - String* rhs = other->TryFlattenGetString(); - - // TODO(dcarney): Compare all types of flat strings with a Visitor. 
- if (StringShape(lhs).IsSequentialAscii() && - StringShape(rhs).IsSequentialAscii()) { - const uint8_t* str1 = SeqOneByteString::cast(lhs)->GetChars(); - const uint8_t* str2 = SeqOneByteString::cast(rhs)->GetChars(); + if (IsSeqOneByteString() && other->IsSeqOneByteString()) { + const uint8_t* str1 = SeqOneByteString::cast(this)->GetChars(); + const uint8_t* str2 = SeqOneByteString::cast(other)->GetChars(); return CompareRawStringContents(str1, str2, len); } @@ -8948,7 +9425,57 @@ bool String::SlowEquals(String* other) { StringComparator comparator(isolate->objects_string_compare_iterator_a(), isolate->objects_string_compare_iterator_b()); - return comparator.Equals(static_cast<unsigned>(len), lhs, rhs); + return comparator.Equals(this, other); +} + + +bool String::SlowEquals(Handle<String> one, Handle<String> two) { + // Fast check: negative check with lengths. + int one_length = one->length(); + if (one_length != two->length()) return false; + if (one_length == 0) return true; + + // Fast check: if hash code is computed for both strings + // a fast negative check can be performed. + if (one->HasHashCode() && two->HasHashCode()) { +#ifdef ENABLE_SLOW_ASSERTS + if (FLAG_enable_slow_asserts) { + if (one->Hash() != two->Hash()) { + bool found_difference = false; + for (int i = 0; i < one_length; i++) { + if (one->Get(i) != two->Get(i)) { + found_difference = true; + break; + } + } + ASSERT(found_difference); + } + } +#endif + if (one->Hash() != two->Hash()) return false; + } + + // We know the strings are both non-empty. Compare the first chars + // before we try to flatten the strings. + if (one->Get(0) != two->Get(0)) return false; + + one = String::Flatten(one); + two = String::Flatten(two); + + DisallowHeapAllocation no_gc; + String::FlatContent flat1 = one->GetFlatContent(); + String::FlatContent flat2 = two->GetFlatContent(); + + if (flat1.IsAscii() && flat2.IsAscii()) { + return CompareRawStringContents(flat1.ToOneByteVector().start(), + flat2.ToOneByteVector().start(), + one_length); + } else { + for (int i = 0; i < one_length; i++) { + if (flat1.Get(i) != flat2.Get(i)) return false; + } + return true; + } } @@ -9033,49 +9560,31 @@ bool String::IsTwoByteEqualTo(Vector<const uc16> str) { class IteratingStringHasher: public StringHasher { public: static inline uint32_t Hash(String* string, uint32_t seed) { - const unsigned len = static_cast<unsigned>(string->length()); - IteratingStringHasher hasher(len, seed); - if (hasher.has_trivial_hash()) { - return hasher.GetHashField(); - } - int32_t type = string->map()->instance_type(); - ConsStringNullOp null_op; - String::Visit(string, 0, hasher, null_op, type, len); - // Flat strings terminate immediately. - if (hasher.consumed_ == len) { - ASSERT(!string->IsConsString()); - return hasher.GetHashField(); - } - ASSERT(string->IsConsString()); + IteratingStringHasher hasher(string->length(), seed); + // Nothing to do. + if (hasher.has_trivial_hash()) return hasher.GetHashField(); + ConsString* cons_string = String::VisitFlat(&hasher, string); + // The string was flat. + if (cons_string == NULL) return hasher.GetHashField(); // This is a ConsString, iterate across it. 
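// Illustrative sketch, not from the V8 sources: how the cached hash codes in
// SlowEquals above act purely as a negative filter -- different hashes prove
// inequality, while equal hashes prove nothing and the characters still have
// to be compared. Toy types only.
#include <cassert>
#include <functional>
#include <string>

struct HashedString {
  std::string chars;
  size_t hash;
  explicit HashedString(const std::string& s)
      : chars(s), hash(std::hash<std::string>()(s)) {}
};

static bool ToyEquals(const HashedString& a, const HashedString& b) {
  if (a.chars.size() != b.chars.size()) return false;  // cheap length check first
  if (a.hash != b.hash) return false;                  // hash mismatch: definitely unequal
  return a.chars == b.chars;                           // hash match: still must compare
}

int main() {
  assert(ToyEquals(HashedString("abc"), HashedString("abc")));
  assert(!ToyEquals(HashedString("abc"), HashedString("abd")));
  assert(!ToyEquals(HashedString("abc"), HashedString("abcd")));
  return 0;
}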
- ConsStringIteratorOp op; - unsigned offset = 0; - unsigned leaf_length = len; - string = op.Operate(string, &offset, &type, &leaf_length); - while (true) { - ASSERT(hasher.consumed_ < len); - String::Visit(string, 0, hasher, null_op, type, leaf_length); - if (hasher.consumed_ == len) break; - string = op.ContinueOperation(&type, &leaf_length); - // This should be taken care of by the length check. - ASSERT(string != NULL); + ConsStringIteratorOp op(cons_string); + int offset; + while (NULL != (string = op.Next(&offset))) { + String::VisitFlat(&hasher, string, offset); } return hasher.GetHashField(); } - inline void VisitOneByteString(const uint8_t* chars, unsigned length) { - AddCharacters(chars, static_cast<int>(length)); - consumed_ += length; + inline void VisitOneByteString(const uint8_t* chars, int length) { + AddCharacters(chars, length); } - inline void VisitTwoByteString(const uint16_t* chars, unsigned length) { - AddCharacters(chars, static_cast<int>(length)); - consumed_ += length; + inline void VisitTwoByteString(const uint16_t* chars, int length) { + AddCharacters(chars, length); } private: inline IteratingStringHasher(int len, uint32_t seed) - : StringHasher(len, seed), - consumed_(0) {} - unsigned consumed_; + : StringHasher(len, seed) { + } DISALLOW_COPY_AND_ASSIGN(IteratingStringHasher); }; @@ -9156,7 +9665,6 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) { } int delta = old_size - new_size; - string->set_length(new_length); Address start_of_string = string->address(); ASSERT_OBJECT_ALIGNED(start_of_string); @@ -9175,6 +9683,10 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) { } heap->AdjustLiveBytes(start_of_string, -delta, Heap::FROM_MUTATOR); + // We are storing the new length using release store after creating a filler + // for the left-over space to avoid races with the sweeper thread. + string->synchronized_set_length(new_length); + if (new_length == 0) return heap->isolate()->factory()->empty_string(); return string; } @@ -9368,11 +9880,16 @@ void Map::ClearNonLiveTransitions(Heap* heap) { } } + // Note that we never eliminate a transition array, though we might right-trim + // such that number_of_transitions() == 0. If this assumption changes, + // TransitionArray::CopyInsert() will need to deal with the case that a + // transition array disappeared during GC. int trim = t->number_of_transitions() - transition_index; if (trim > 0) { RightTrimFixedArray<Heap::FROM_GC>(heap, t, t->IsSimpleTransition() ? trim : trim * TransitionArray::kTransitionSize); } + ASSERT(HasTransitionArray()); } @@ -9493,52 +10010,38 @@ void SharedFunctionInfo::AddToOptimizedCodeMap( Handle<Code> code, Handle<FixedArray> literals, BailoutId osr_ast_id) { - CALL_HEAP_FUNCTION_VOID( - shared->GetIsolate(), - shared->AddToOptimizedCodeMap( - *native_context, *code, *literals, osr_ast_id)); -} - - -MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context, - Code* code, - FixedArray* literals, - BailoutId osr_ast_id) { + Isolate* isolate = shared->GetIsolate(); ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); ASSERT(native_context->IsNativeContext()); STATIC_ASSERT(kEntryLength == 4); - Heap* heap = GetHeap(); - FixedArray* new_code_map; - Object* value = optimized_code_map(); - Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); + Handle<FixedArray> new_code_map; + Handle<Object> value(shared->optimized_code_map(), isolate); + int old_length; if (value->IsSmi()) { // No optimized code map. 
- ASSERT_EQ(0, Smi::cast(value)->value()); + ASSERT_EQ(0, Smi::cast(*value)->value()); // Create 3 entries per context {context, code, literals}. - MaybeObject* maybe = heap->AllocateFixedArray(kInitialLength); - if (!maybe->To(&new_code_map)) return maybe; - new_code_map->set(kEntriesStart + kContextOffset, native_context); - new_code_map->set(kEntriesStart + kCachedCodeOffset, code); - new_code_map->set(kEntriesStart + kLiteralsOffset, literals); - new_code_map->set(kEntriesStart + kOsrAstIdOffset, osr_ast_id_smi); + new_code_map = isolate->factory()->NewFixedArray(kInitialLength); + old_length = kEntriesStart; } else { // Copy old map and append one new entry. - FixedArray* old_code_map = FixedArray::cast(value); - ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context, osr_ast_id)); - int old_length = old_code_map->length(); - int new_length = old_length + kEntryLength; - MaybeObject* maybe = old_code_map->CopySize(new_length); - if (!maybe->To(&new_code_map)) return maybe; - new_code_map->set(old_length + kContextOffset, native_context); - new_code_map->set(old_length + kCachedCodeOffset, code); - new_code_map->set(old_length + kLiteralsOffset, literals); - new_code_map->set(old_length + kOsrAstIdOffset, osr_ast_id_smi); + Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value); + ASSERT_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id)); + old_length = old_code_map->length(); + new_code_map = FixedArray::CopySize( + old_code_map, old_length + kEntryLength); // Zap the old map for the sake of the heap verifier. if (Heap::ShouldZapGarbage()) { Object** data = old_code_map->data_start(); - MemsetPointer(data, heap->the_hole_value(), old_length); + MemsetPointer(data, isolate->heap()->the_hole_value(), old_length); } } + new_code_map->set(old_length + kContextOffset, *native_context); + new_code_map->set(old_length + kCachedCodeOffset, *code); + new_code_map->set(old_length + kLiteralsOffset, *literals); + new_code_map->set(old_length + kOsrAstIdOffset, + Smi::FromInt(osr_ast_id.ToInt())); + #ifdef DEBUG for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext()); @@ -9549,8 +10052,7 @@ MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context, ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); } #endif - set_optimized_code_map(new_code_map); - return new_code_map; + shared->set_optimized_code_map(*new_code_map); } @@ -9592,6 +10094,7 @@ void SharedFunctionInfo::ClearOptimizedCodeMap() { void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, const char* reason) { + DisallowHeapAllocation no_gc; if (optimized_code_map()->IsSmi()) return; FixedArray* code_map = FixedArray::cast(optimized_code_map()); @@ -9657,48 +10160,37 @@ void JSObject::OptimizeAsPrototype(Handle<JSObject> object) { } -static MUST_USE_RESULT MaybeObject* CacheInitialJSArrayMaps( - Context* native_context, Map* initial_map) { +Handle<Object> CacheInitialJSArrayMaps( + Handle<Context> native_context, Handle<Map> initial_map) { // Replace all of the cached initial array maps in the native context with // the appropriate transitioned elements kind maps. 
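// Illustrative sketch, not from the V8 sources: the layout maintained by
// AddToOptimizedCodeMap above -- a flat array of fixed-size records
// {context, code, literals, osr ast id}, appended one whole record at a time
// and searched linearly by (context, osr ast id). Toy integer ids stand in
// for the real heap objects.
#include <cassert>
#include <vector>

static const int kEntryLength = 4;
static const int kContextOffset = 0;
static const int kCachedCodeOffset = 1;
static const int kLiteralsOffset = 2;
static const int kOsrAstIdOffset = 3;

static void AddEntry(std::vector<int>* code_map, int context, int code,
                     int literals, int osr_ast_id) {
  int old_length = static_cast<int>(code_map->size());
  code_map->resize(old_length + kEntryLength);      // copy-and-grow by one record
  (*code_map)[old_length + kContextOffset] = context;
  (*code_map)[old_length + kCachedCodeOffset] = code;
  (*code_map)[old_length + kLiteralsOffset] = literals;
  (*code_map)[old_length + kOsrAstIdOffset] = osr_ast_id;
}

static int SearchEntry(const std::vector<int>& code_map, int context, int osr_ast_id) {
  for (int i = 0; i + kEntryLength <= static_cast<int>(code_map.size()); i += kEntryLength) {
    if (code_map[i + kContextOffset] == context &&
        code_map[i + kOsrAstIdOffset] == osr_ast_id) {
      return code_map[i + kCachedCodeOffset];       // found cached optimized code
    }
  }
  return -1;                                        // no cached code for this pair
}

int main() {
  std::vector<int> code_map;
  AddEntry(&code_map, /*context=*/7, /*code=*/100, /*literals=*/200, /*osr_ast_id=*/0);
  AddEntry(&code_map, 7, 101, 201, 3);
  assert(SearchEntry(code_map, 7, 3) == 101);
  assert(SearchEntry(code_map, 8, 0) == -1);
  return 0;
}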
- Heap* heap = native_context->GetHeap(); - MaybeObject* maybe_maps = - heap->AllocateFixedArrayWithHoles(kElementsKindCount, TENURED); - FixedArray* maps; - if (!maybe_maps->To(&maps)) return maybe_maps; + Factory* factory = native_context->GetIsolate()->factory(); + Handle<FixedArray> maps = factory->NewFixedArrayWithHoles( + kElementsKindCount, TENURED); - Map* current_map = initial_map; + Handle<Map> current_map = initial_map; ElementsKind kind = current_map->elements_kind(); ASSERT(kind == GetInitialFastElementsKind()); - maps->set(kind, current_map); + maps->set(kind, *current_map); for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1; i < kFastElementsKindCount; ++i) { - Map* new_map; + Handle<Map> new_map; ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i); if (current_map->HasElementsTransition()) { - new_map = current_map->elements_transition_map(); + new_map = handle(current_map->elements_transition_map()); ASSERT(new_map->elements_kind() == next_kind); } else { - MaybeObject* maybe_new_map = - current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION); - if (!maybe_new_map->To(&new_map)) return maybe_new_map; + new_map = Map::CopyAsElementsKind( + current_map, next_kind, INSERT_TRANSITION); } - maps->set(next_kind, new_map); + maps->set(next_kind, *new_map); current_map = new_map; } - native_context->set_js_array_maps(maps); + native_context->set_js_array_maps(*maps); return initial_map; } -Handle<Object> CacheInitialJSArrayMaps(Handle<Context> native_context, - Handle<Map> initial_map) { - CALL_HEAP_FUNCTION(native_context->GetIsolate(), - CacheInitialJSArrayMaps(*native_context, *initial_map), - Object); -} - - void JSFunction::SetInstancePrototype(Handle<JSFunction> function, Handle<Object> value) { ASSERT(value->IsJSReceiver()); @@ -9772,20 +10264,25 @@ void JSFunction::SetPrototype(Handle<JSFunction> function, } -void JSFunction::RemovePrototype() { +bool JSFunction::RemovePrototype() { Context* native_context = context()->native_context(); Map* no_prototype_map = shared()->strict_mode() == SLOPPY ? native_context->sloppy_function_without_prototype_map() : native_context->strict_function_without_prototype_map(); - if (map() == no_prototype_map) return; + if (map() == no_prototype_map) return true; - ASSERT(map() == (shared()->strict_mode() == SLOPPY +#ifdef DEBUG + if (map() != (shared()->strict_mode() == SLOPPY ? 
native_context->sloppy_function_map() - : native_context->strict_function_map())); + : native_context->strict_function_map())) { + return false; + } +#endif set_map(no_prototype_map); set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value()); + return true; } @@ -9883,20 +10380,171 @@ bool JSFunction::PassesFilter(const char* raw_filter) { } -MaybeObject* Oddball::Initialize(Heap* heap, - const char* to_string, - Object* to_number, - byte kind) { - String* internalized_to_string; - { MaybeObject* maybe_string = - heap->InternalizeUtf8String( - CStrVector(to_string)); - if (!maybe_string->To(&internalized_to_string)) return maybe_string; +void Oddball::Initialize(Isolate* isolate, + Handle<Oddball> oddball, + const char* to_string, + Handle<Object> to_number, + byte kind) { + Handle<String> internalized_to_string = + isolate->factory()->InternalizeUtf8String(to_string); + oddball->set_to_string(*internalized_to_string); + oddball->set_to_number(*to_number); + oddball->set_kind(kind); +} + + +void Script::InitLineEnds(Handle<Script> script) { + if (!script->line_ends()->IsUndefined()) return; + + Isolate* isolate = script->GetIsolate(); + + if (!script->source()->IsString()) { + ASSERT(script->source()->IsUndefined()); + Handle<FixedArray> empty = isolate->factory()->NewFixedArray(0); + script->set_line_ends(*empty); + ASSERT(script->line_ends()->IsFixedArray()); + return; + } + + Handle<String> src(String::cast(script->source()), isolate); + + Handle<FixedArray> array = String::CalculateLineEnds(src, true); + + if (*array != isolate->heap()->empty_fixed_array()) { + array->set_map(isolate->heap()->fixed_cow_array_map()); + } + + script->set_line_ends(*array); + ASSERT(script->line_ends()->IsFixedArray()); +} + + +int Script::GetColumnNumber(Handle<Script> script, int code_pos) { + int line_number = GetLineNumber(script, code_pos); + if (line_number == -1) return -1; + + DisallowHeapAllocation no_allocation; + FixedArray* line_ends_array = FixedArray::cast(script->line_ends()); + line_number = line_number - script->line_offset()->value(); + if (line_number == 0) return code_pos + script->column_offset()->value(); + int prev_line_end_pos = + Smi::cast(line_ends_array->get(line_number - 1))->value(); + return code_pos - (prev_line_end_pos + 1); +} + + +int Script::GetLineNumberWithArray(int code_pos) { + DisallowHeapAllocation no_allocation; + ASSERT(line_ends()->IsFixedArray()); + FixedArray* line_ends_array = FixedArray::cast(line_ends()); + int line_ends_len = line_ends_array->length(); + if (line_ends_len == 0) return -1; + + if ((Smi::cast(line_ends_array->get(0)))->value() >= code_pos) { + return line_offset()->value(); + } + + int left = 0; + int right = line_ends_len; + while (int half = (right - left) / 2) { + if ((Smi::cast(line_ends_array->get(left + half)))->value() > code_pos) { + right -= half; + } else { + left += half; + } } - set_to_string(internalized_to_string); - set_to_number(to_number); - set_kind(kind); - return this; + return right + line_offset()->value(); +} + + +int Script::GetLineNumber(Handle<Script> script, int code_pos) { + InitLineEnds(script); + return script->GetLineNumberWithArray(code_pos); +} + + +int Script::GetLineNumber(int code_pos) { + DisallowHeapAllocation no_allocation; + if (!line_ends()->IsUndefined()) return GetLineNumberWithArray(code_pos); + + // Slow mode: we do not have line_ends. We have to iterate through source. 
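// Illustrative sketch, not from the V8 sources: the kind of lookup that
// GetLineNumberWithArray and GetColumnNumber above perform with the sorted
// line-end offsets. Here the line is taken to be the first line end at or
// after the position and the column is the distance past the previous line
// end; boundary behaviour at the newline characters themselves and the
// script's own line/column offsets are simplified away.
#include <algorithm>
#include <cassert>
#include <vector>

static int ToyGetLineNumber(const std::vector<int>& line_ends, int code_pos) {
  std::vector<int>::const_iterator it =
      std::lower_bound(line_ends.begin(), line_ends.end(), code_pos);
  if (it == line_ends.end()) return -1;              // past the end of the source
  return static_cast<int>(it - line_ends.begin());
}

static int ToyGetColumnNumber(const std::vector<int>& line_ends, int code_pos) {
  int line = ToyGetLineNumber(line_ends, code_pos);
  if (line <= 0) return line == 0 ? code_pos : -1;   // first line: column equals position
  return code_pos - (line_ends[line - 1] + 1);       // chars past the previous line end
}

int main() {
  std::vector<int> line_ends = {2, 5, 8};            // line ends of "ab\ncd\nef"
  assert(ToyGetLineNumber(line_ends, 4) == 1);       // 'd' is on line 1...
  assert(ToyGetColumnNumber(line_ends, 4) == 1);     // ...at column 1
  assert(ToyGetLineNumber(line_ends, 0) == 0);
  assert(ToyGetColumnNumber(line_ends, 0) == 0);
  return 0;
}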
+ if (!source()->IsString()) return -1; + + String* source_string = String::cast(source()); + int line = 0; + int len = source_string->length(); + for (int pos = 0; pos < len; pos++) { + if (pos == code_pos) break; + if (source_string->Get(pos) == '\n') line++; + } + return line; +} + + +Handle<Object> Script::GetNameOrSourceURL(Handle<Script> script) { + Isolate* isolate = script->GetIsolate(); + Handle<String> name_or_source_url_key = + isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("nameOrSourceURL")); + Handle<JSObject> script_wrapper = Script::GetWrapper(script); + Handle<Object> property = Object::GetProperty( + script_wrapper, name_or_source_url_key).ToHandleChecked(); + ASSERT(property->IsJSFunction()); + Handle<JSFunction> method = Handle<JSFunction>::cast(property); + Handle<Object> result; + // Do not check against pending exception, since this function may be called + // when an exception has already been pending. + if (!Execution::TryCall(method, script_wrapper, 0, NULL).ToHandle(&result)) { + return isolate->factory()->undefined_value(); + } + return result; +} + + +// Wrappers for scripts are kept alive and cached in weak global +// handles referred from foreign objects held by the scripts as long as +// they are used. When they are not used anymore, the garbage +// collector will call the weak callback on the global handle +// associated with the wrapper and get rid of both the wrapper and the +// handle. +static void ClearWrapperCache( + const v8::WeakCallbackData<v8::Value, void>& data) { + Object** location = reinterpret_cast<Object**>(data.GetParameter()); + JSValue* wrapper = JSValue::cast(*location); + Foreign* foreign = Script::cast(wrapper->value())->wrapper(); + ASSERT_EQ(foreign->foreign_address(), reinterpret_cast<Address>(location)); + foreign->set_foreign_address(0); + GlobalHandles::Destroy(location); + Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate()); + isolate->counters()->script_wrappers()->Decrement(); +} + + +Handle<JSObject> Script::GetWrapper(Handle<Script> script) { + if (script->wrapper()->foreign_address() != NULL) { + // Return a handle for the existing script wrapper from the cache. + return Handle<JSValue>( + *reinterpret_cast<JSValue**>(script->wrapper()->foreign_address())); + } + Isolate* isolate = script->GetIsolate(); + // Construct a new script wrapper. + isolate->counters()->script_wrappers()->Increment(); + Handle<JSFunction> constructor = isolate->script_function(); + Handle<JSValue> result = + Handle<JSValue>::cast(isolate->factory()->NewJSObject(constructor)); + + result->set_value(*script); + + // Create a new weak global handle and use it to cache the wrapper + // for future use. The cache will automatically be cleared by the + // garbage collector when it is not used anymore. + Handle<Object> handle = isolate->global_handles()->Create(*result); + GlobalHandles::MakeWeak(handle.location(), + reinterpret_cast<void*>(handle.location()), + &ClearWrapperCache); + script->wrapper()->set_foreign_address( + reinterpret_cast<Address>(handle.location())); + return result; } @@ -10073,7 +10721,8 @@ void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) { set_live_objects_may_exist(true); // No tracking during the snapshot construction phase. 
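// Illustrative sketch, not from the V8 sources: the caching policy that
// Script::GetWrapper above implements -- hand out the cached wrapper while it
// is still in use, but keep the cache entry weak so the cache alone never
// keeps a wrapper alive. std::weak_ptr stands in for the weak global handle
// and its callback; all names are hypothetical.
#include <cassert>
#include <memory>
#include <unordered_map>

struct ToyWrapper { int script_id; };

class ToyWrapperCache {
 public:
  std::shared_ptr<ToyWrapper> GetWrapper(int script_id) {
    auto it = cache_.find(script_id);
    if (it != cache_.end()) {
      if (std::shared_ptr<ToyWrapper> alive = it->second.lock()) {
        return alive;                        // reuse the wrapper that is still alive
      }
    }
    std::shared_ptr<ToyWrapper> fresh = std::make_shared<ToyWrapper>();
    fresh->script_id = script_id;
    cache_[script_id] = fresh;               // weak entry: keeps nothing alive by itself
    return fresh;
  }

 private:
  std::unordered_map<int, std::weak_ptr<ToyWrapper>> cache_;
};

int main() {
  ToyWrapperCache cache;
  std::shared_ptr<ToyWrapper> first = cache.GetWrapper(1);
  std::shared_ptr<ToyWrapper> again = cache.GetWrapper(1);
  assert(first == again);                    // reused while someone still holds it
  first.reset();
  again.reset();                             // last user gone: the cache entry expires
  std::shared_ptr<ToyWrapper> fresh = cache.GetWrapper(1);
  assert(fresh->script_id == 1);             // a fresh wrapper is handed out
  return 0;
}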
- if (Serializer::enabled()) return; + Isolate* isolate = GetIsolate(); + if (Serializer::enabled(isolate)) return; if (map->unused_property_fields() == 0) return; @@ -10083,7 +10732,7 @@ void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) { set_construction_count(kGenerousAllocationCount); } set_initial_map(map); - Builtins* builtins = map->GetHeap()->isolate()->builtins(); + Builtins* builtins = isolate->builtins(); ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric), construct_stub()); set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown)); @@ -10132,6 +10781,9 @@ void SharedFunctionInfo::AttachInitialMap(Map* map) { void SharedFunctionInfo::ResetForNewContext(int new_ic_age) { code()->ClearInlineCaches(); + // If we clear ICs, we need to clear the type feedback vector too, since + // CallICs are synced with a feedback vector slot. + ClearTypeFeedbackInfo(); set_ic_age(new_ic_age); if (code()->kind() == Code::FUNCTION) { code()->set_profiler_ticks(0); @@ -10192,6 +10844,7 @@ void SharedFunctionInfo::CompleteInobjectSlackTracking() { int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context, BailoutId osr_ast_id) { + DisallowHeapAllocation no_gc; ASSERT(native_context->IsNativeContext()); if (!FLAG_cache_optimized_code) return -1; Object* value = optimized_code_map(); @@ -10500,21 +11153,6 @@ void Code::FindAllMaps(MapHandleList* maps) { } -void Code::FindAllTypes(TypeHandleList* types) { - ASSERT(is_inline_cache_stub()); - DisallowHeapAllocation no_allocation; - int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); - for (RelocIterator it(this, mask); !it.done(); it.next()) { - RelocInfo* info = it.rinfo(); - Object* object = info->target_object(); - if (object->IsMap()) { - Handle<Map> map(Map::cast(object)); - types->Add(IC::MapToType<HeapType>(map, map->GetIsolate())); - } - } -} - - Code* Code::FindFirstHandler() { ASSERT(is_inline_cache_stub()); DisallowHeapAllocation no_allocation; @@ -10587,19 +11225,16 @@ void Code::ClearInlineCaches(Code::Kind* kind) { } -void Code::ClearTypeFeedbackInfo(Heap* heap) { - if (kind() != FUNCTION) return; - Object* raw_info = type_feedback_info(); - if (raw_info->IsTypeFeedbackInfo()) { - FixedArray* feedback_vector = - TypeFeedbackInfo::cast(raw_info)->feedback_vector(); - for (int i = 0; i < feedback_vector->length(); i++) { - Object* obj = feedback_vector->get(i); - if (!obj->IsAllocationSite()) { - // TODO(mvstanton): Can't I avoid a write barrier for this sentinel? 
- feedback_vector->set(i, - TypeFeedbackInfo::RawUninitializedSentinel(heap)); - } +void SharedFunctionInfo::ClearTypeFeedbackInfo() { + FixedArray* vector = feedback_vector(); + Heap* heap = GetHeap(); + for (int i = 0; i < vector->length(); i++) { + Object* obj = vector->get(i); + if (!obj->IsAllocationSite()) { + vector->set( + i, + TypeFeedbackInfo::RawUninitializedSentinel(heap), + SKIP_WRITE_BARRIER); } } } @@ -10656,10 +11291,11 @@ void Code::MakeOlder(MarkingParity current_parity) { if (sequence != NULL) { Age age; MarkingParity code_parity; - GetCodeAgeAndParity(sequence, &age, &code_parity); + Isolate* isolate = GetIsolate(); + GetCodeAgeAndParity(isolate, sequence, &age, &code_parity); age = EffectiveAge(age); if (age != kLastCodeAge && code_parity != current_parity) { - PatchPlatformCodeAge(GetIsolate(), + PatchPlatformCodeAge(isolate, sequence, static_cast<Age>(age + 1), current_parity); @@ -10695,7 +11331,7 @@ Code::Age Code::GetRawAge() { } Age age; MarkingParity parity; - GetCodeAgeAndParity(sequence, &age, &parity); + GetCodeAgeAndParity(GetIsolate(), sequence, &age, &parity); return age; } @@ -11180,8 +11816,8 @@ Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength( Handle<Map> new_map = (new_elements_kind != elements_kind) ? GetElementsTransitionMap(object, new_elements_kind) : handle(object->map()); - object->ValidateElements(); - object->set_map_and_elements(*new_map, *new_elements); + JSObject::ValidateElements(object); + JSObject::SetMapAndElements(object, new_map, new_elements); // Transition through the allocation site as well if present. JSObject::UpdateAllocationSite(object, new_elements_kind); @@ -11226,8 +11862,8 @@ void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object, ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS); accessor->CopyElements(object, elems, elements_kind); - object->ValidateElements(); - object->set_map_and_elements(*new_map, *elems); + JSObject::ValidateElements(object); + JSObject::SetMapAndElements(object, new_map, elems); if (FLAG_trace_elements_transitions) { PrintElementsTransition(stdout, object, elements_kind, old_elements, @@ -11267,10 +11903,10 @@ static bool GetOldValue(Isolate* isolate, ASSERT(attributes != ABSENT); if (attributes == DONT_DELETE) return false; Handle<Object> value; - if (object->GetLocalElementAccessorPair(index) != NULL) { + if (!JSObject::GetLocalElementAccessorPair(object, index).is_null()) { value = Handle<Object>::cast(isolate->factory()->the_hole_value()); } else { - value = Object::GetElementNoExceptionThrown(isolate, object, index); + value = Object::GetElement(isolate, object, index).ToHandleChecked(); } old_values->Add(value); indices->Add(index); @@ -11290,12 +11926,11 @@ static void EnqueueSpliceRecord(Handle<JSArray> object, Handle<Object> args[] = { object, index_object, deleted, add_count_object }; - bool threw; Execution::Call(isolate, Handle<JSFunction>(isolate->observers_enqueue_splice()), - isolate->factory()->undefined_value(), ARRAY_SIZE(args), args, - &threw); - ASSERT(!threw); + isolate->factory()->undefined_value(), + ARRAY_SIZE(args), + args).Assert(); } @@ -11304,12 +11939,11 @@ static void BeginPerformSplice(Handle<JSArray> object) { HandleScope scope(isolate); Handle<Object> args[] = { object }; - bool threw; Execution::Call(isolate, Handle<JSFunction>(isolate->observers_begin_perform_splice()), - isolate->factory()->undefined_value(), ARRAY_SIZE(args), args, - &threw); - ASSERT(!threw); + isolate->factory()->undefined_value(), + 
ARRAY_SIZE(args), + args).Assert(); } @@ -11318,17 +11952,17 @@ static void EndPerformSplice(Handle<JSArray> object) { HandleScope scope(isolate); Handle<Object> args[] = { object }; - bool threw; Execution::Call(isolate, Handle<JSFunction>(isolate->observers_end_perform_splice()), - isolate->factory()->undefined_value(), ARRAY_SIZE(args), args, - &threw); - ASSERT(!threw); + isolate->factory()->undefined_value(), + ARRAY_SIZE(args), + args).Assert(); } -Handle<Object> JSArray::SetElementsLength(Handle<JSArray> array, - Handle<Object> new_length_handle) { +MaybeHandle<Object> JSArray::SetElementsLength( + Handle<JSArray> array, + Handle<Object> new_length_handle) { // We should never end in here with a pixel or external array. ASSERT(array->AllowsSetElementsLength()); if (!array->map()->is_observed()) { @@ -11366,9 +12000,11 @@ Handle<Object> JSArray::SetElementsLength(Handle<JSArray> array, } } - Handle<Object> hresult = - array->GetElementsAccessor()->SetLength(array, new_length_handle); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, hresult, hresult); + Handle<Object> hresult; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, hresult, + array->GetElementsAccessor()->SetLength(array, new_length_handle), + Object); CHECK(array->length()->ToArrayIndex(&new_length)); if (old_length == new_length) return hresult; @@ -11398,13 +12034,13 @@ Handle<Object> JSArray::SetElementsLength(Handle<JSArray> array, // Skip deletions where the property was an accessor, leaving holes // in the array of old values. if (old_values[i]->IsTheHole()) continue; - JSObject::SetElement(deleted, indices[i] - index, old_values[i], NONE, - SLOPPY); + JSObject::SetElement( + deleted, indices[i] - index, old_values[i], NONE, SLOPPY).Assert(); } SetProperty(deleted, isolate->factory()->length_string(), isolate->factory()->NewNumberFromUint(delete_count), - NONE, SLOPPY); + NONE, SLOPPY).Assert(); } EnqueueSpliceRecord(array, index, deleted, add_count); @@ -11450,13 +12086,9 @@ Handle<Map> Map::PutPrototypeTransition(Handle<Map> map, if (capacity > kMaxCachedPrototypeTransitions) return map; // Grow array by factor 2 over and above what we need. - Factory* factory = map->GetIsolate()->factory(); - cache = factory->CopySizeFixedArray(cache, transitions * 2 * step + header); + cache = FixedArray::CopySize(cache, transitions * 2 * step + header); - CALL_AND_RETRY_OR_DIE(map->GetIsolate(), - map->SetPrototypeTransitions(*cache), - break, - return Handle<Map>()); + SetPrototypeTransitions(map, cache); } // Reload number of transitions as GC might shrink them. 
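A recurring change in these hunks is the move from returning an empty Handle (or threading a bool* threw out-parameter) to returning MaybeHandle, with ASSIGN_RETURN_ON_EXCEPTION propagating failure upward and .Assert() marking call sites that are not supposed to fail. A toy version of that control flow, using std::optional purely as a stand-in (none of the names below are V8 API):

#include <cassert>
#include <iostream>
#include <optional>
#include <string>

// Failure travels in the return value: the caller either propagates it or
// asserts that failure is impossible at this particular call site.
template <typename T>
using Maybe = std::optional<T>;

Maybe<int> ParsePositive(const std::string& s) {
  try {
    int v = std::stoi(s);
    if (v <= 0) return std::nullopt;   // the "exception" is an empty result
    return v;
  } catch (...) {
    return std::nullopt;
  }
}

Maybe<int> Doubled(const std::string& s) {
  Maybe<int> v = ParsePositive(s);
  if (!v) return std::nullopt;         // ASSIGN_RETURN_ON_EXCEPTION analogue
  return *v * 2;
}

int main() {
  Maybe<int> ok = Doubled("21");
  assert(ok.has_value());              // .Assert() analogue: cannot fail here
  std::cout << *ok << "\n";                        // 42
  std::cout << Doubled("-3").has_value() << "\n";  // 0: failure propagated
}

The point is that every caller is forced to either forward the failure or explicitly claim it cannot happen.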
@@ -11492,21 +12124,42 @@ void Map::ZapPrototypeTransitions() { } -void Map::AddDependentCompilationInfo(DependentCode::DependencyGroup group, +// static +void Map::AddDependentCompilationInfo(Handle<Map> map, + DependentCode::DependencyGroup group, CompilationInfo* info) { - Handle<DependentCode> dep(dependent_code()); Handle<DependentCode> codes = - DependentCode::Insert(dep, group, info->object_wrapper()); - if (*codes != dependent_code()) set_dependent_code(*codes); - info->dependencies(group)->Add(Handle<HeapObject>(this), info->zone()); + DependentCode::Insert(handle(map->dependent_code(), info->isolate()), + group, info->object_wrapper()); + if (*codes != map->dependent_code()) map->set_dependent_code(*codes); + info->dependencies(group)->Add(map, info->zone()); } -void Map::AddDependentCode(DependentCode::DependencyGroup group, +// static +void Map::AddDependentCode(Handle<Map> map, + DependentCode::DependencyGroup group, Handle<Code> code) { Handle<DependentCode> codes = DependentCode::Insert( - Handle<DependentCode>(dependent_code()), group, code); - if (*codes != dependent_code()) set_dependent_code(*codes); + Handle<DependentCode>(map->dependent_code()), group, code); + if (*codes != map->dependent_code()) map->set_dependent_code(*codes); +} + + +// static +void Map::AddDependentIC(Handle<Map> map, + Handle<Code> stub) { + ASSERT(stub->next_code_link()->IsUndefined()); + int n = map->dependent_code()->number_of_entries(DependentCode::kWeakICGroup); + if (n == 0) { + // Slow path: insert the head of the list with possible heap allocation. + Map::AddDependentCode(map, DependentCode::kWeakICGroup, stub); + } else { + // Fast path: link the stub to the existing head of the list without any + // heap allocation. + ASSERT(n == 1); + map->dependent_code()->AddToDependentICList(stub); + } } @@ -11549,11 +12202,10 @@ Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries, if (entries->object_at(i) == *object) return entries; } if (entries->length() < kCodesStartIndex + number_of_entries + 1) { - Factory* factory = entries->GetIsolate()->factory(); int capacity = kCodesStartIndex + number_of_entries + 1; if (capacity > 5) capacity = capacity * 5 / 4; Handle<DependentCode> new_entries = Handle<DependentCode>::cast( - factory->CopySizeFixedArray(entries, capacity, TENURED)); + FixedArray::CopySize(entries, capacity, TENURED)); // The number of codes can change after GC. 
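AddDependentIC above has a fast path that links a new stub into the existing weak-IC list through the stub's own next_code_link field, so once a list head exists no further heap allocation is needed. That is ordinary intrusive head insertion; a plain-pointer sketch with invented names (Stub, AddToDependentList, ListContains):

#include <iostream>

// Minimal intrusive list: each node carries its own "next" link, so adding
// to the head needs no extra allocation.
struct Stub {
  int id;
  Stub* next_code_link = nullptr;
};

void AddToDependentList(Stub*& head, Stub* stub) {
  stub->next_code_link = head;  // link the new stub in front of the old head
  head = stub;
}

bool ListContains(const Stub* head, const Stub* stub) {
  for (const Stub* s = head; s != nullptr; s = s->next_code_link) {
    if (s == stub) return true;
  }
  return false;
}

int main() {
  Stub a{1}, b{2};
  Stub* head = nullptr;
  AddToDependentList(head, &a);
  AddToDependentList(head, &b);
  std::cout << ListContains(head, &a) << ListContains(head, &b) << "\n";  // 11
}

Walking next_code_link is also how membership is checked, which is what the CodeListContains helper in the next chunk does.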
starts.Recompute(*entries); start = starts.at(group); @@ -11640,10 +12292,22 @@ void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group, } +static bool CodeListContains(Object* head, Code* code) { + while (!head->IsUndefined()) { + if (head == code) return true; + head = Code::cast(head)->next_code_link(); + } + return false; +} + + bool DependentCode::Contains(DependencyGroup group, Code* code) { GroupStartIndexes starts(this); int start = starts.at(group); int end = starts.at(group + 1); + if (group == kWeakICGroup) { + return CodeListContains(object_at(start), code); + } for (int i = start; i < end; i++) { if (object_at(i) == code) return true; } @@ -11700,9 +12364,30 @@ void DependentCode::DeoptimizeDependentCodeGroup( } -Handle<Object> JSObject::SetPrototype(Handle<JSObject> object, - Handle<Object> value, - bool skip_hidden_prototypes) { +void DependentCode::AddToDependentICList(Handle<Code> stub) { + DisallowHeapAllocation no_heap_allocation; + GroupStartIndexes starts(this); + int i = starts.at(kWeakICGroup); + stub->set_next_code_link(object_at(i)); + set_object_at(i, *stub); +} + + +Handle<Map> Map::TransitionToPrototype(Handle<Map> map, + Handle<Object> prototype) { + Handle<Map> new_map = GetPrototypeTransition(map, prototype); + if (new_map.is_null()) { + new_map = Copy(map); + PutPrototypeTransition(map, prototype, new_map); + new_map->set_prototype(*prototype); + } + return new_map; +} + + +MaybeHandle<Object> JSObject::SetPrototype(Handle<JSObject> object, + Handle<Object> value, + bool skip_hidden_prototypes) { #ifdef DEBUG int size = object->Size(); #endif @@ -11725,8 +12410,7 @@ Handle<Object> JSObject::SetPrototype(Handle<JSObject> object, Handle<Object> args[] = { object }; Handle<Object> error = isolate->factory()->NewTypeError( "non_extensible_proto", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } // Before we can set the prototype we need to be sure @@ -11740,8 +12424,7 @@ Handle<Object> JSObject::SetPrototype(Handle<JSObject> object, // Cycle detected. 
Handle<Object> error = isolate->factory()->NewError( "cyclic_proto", HandleVector<Object>(NULL, 0)); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } } @@ -11770,12 +12453,7 @@ Handle<Object> JSObject::SetPrototype(Handle<JSObject> object, JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value)); } - Handle<Map> new_map = Map::GetPrototypeTransition(map, value); - if (new_map.is_null()) { - new_map = Map::Copy(map); - Map::PutPrototypeTransition(map, value, new_map); - new_map->set_prototype(*value); - } + Handle<Map> new_map = Map::TransitionToPrototype(map, value); ASSERT(new_map->prototype() == *value); JSObject::MigrateToMap(real_receiver, new_map); @@ -11806,39 +12484,44 @@ void JSObject::EnsureCanContainElements(Handle<JSObject> object, } -AccessorPair* JSObject::GetLocalPropertyAccessorPair(Name* name) { +MaybeHandle<AccessorPair> JSObject::GetLocalPropertyAccessorPair( + Handle<JSObject> object, + Handle<Name> name) { uint32_t index = 0; if (name->AsArrayIndex(&index)) { - return GetLocalElementAccessorPair(index); + return GetLocalElementAccessorPair(object, index); } - LookupResult lookup(GetIsolate()); - LocalLookupRealNamedProperty(name, &lookup); + Isolate* isolate = object->GetIsolate(); + LookupResult lookup(isolate); + object->LocalLookupRealNamedProperty(name, &lookup); if (lookup.IsPropertyCallbacks() && lookup.GetCallbackObject()->IsAccessorPair()) { - return AccessorPair::cast(lookup.GetCallbackObject()); + return handle(AccessorPair::cast(lookup.GetCallbackObject()), isolate); } - return NULL; + return MaybeHandle<AccessorPair>(); } -AccessorPair* JSObject::GetLocalElementAccessorPair(uint32_t index) { - if (IsJSGlobalProxy()) { - Object* proto = GetPrototype(); - if (proto->IsNull()) return NULL; +MaybeHandle<AccessorPair> JSObject::GetLocalElementAccessorPair( + Handle<JSObject> object, + uint32_t index) { + if (object->IsJSGlobalProxy()) { + Handle<Object> proto(object->GetPrototype(), object->GetIsolate()); + if (proto->IsNull()) return MaybeHandle<AccessorPair>(); ASSERT(proto->IsJSGlobalObject()); - return JSObject::cast(proto)->GetLocalElementAccessorPair(index); + return GetLocalElementAccessorPair(Handle<JSObject>::cast(proto), index); } // Check for lookup interceptor. 
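The "cyclic_proto" error above enforces a simple invariant: an object may not appear anywhere in its own prototype chain, so before installing a new prototype the chain starting at the proposed value is walked, and the assignment is rejected if that walk reaches the receiver. The same check in isolation (Obj and WouldCreateCycle are invented names):

#include <iostream>

// Walk 'value' and its prototypes; if 'object' shows up, installing 'value'
// as object's prototype would create a cycle.
struct Obj { Obj* prototype = nullptr; };

bool WouldCreateCycle(const Obj* object, const Obj* value) {
  for (const Obj* p = value; p != nullptr; p = p->prototype) {
    if (p == object) return true;
  }
  return false;
}

int main() {
  Obj a, b, c;
  b.prototype = &a;
  c.prototype = &b;
  std::cout << WouldCreateCycle(&a, &c) << "\n";  // 1: a is already behind c
  std::cout << WouldCreateCycle(&c, &a) << "\n";  // 0: fine to install
}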
- if (HasIndexedInterceptor()) return NULL; + if (object->HasIndexedInterceptor()) return MaybeHandle<AccessorPair>(); - return GetElementsAccessor()->GetAccessorPair(this, this, index); + return object->GetElementsAccessor()->GetAccessorPair(object, object, index); } -Handle<Object> JSObject::SetElementWithInterceptor( +MaybeHandle<Object> JSObject::SetElementWithInterceptor( Handle<JSObject> object, uint32_t index, Handle<Object> value, @@ -11862,7 +12545,7 @@ Handle<Object> JSObject::SetElementWithInterceptor( *object); v8::Handle<v8::Value> result = args.Call(setter, index, v8::Utils::ToLocal(value)); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); if (!result.IsEmpty()) return value; } @@ -11873,76 +12556,72 @@ Handle<Object> JSObject::SetElementWithInterceptor( } -MaybeObject* JSObject::GetElementWithCallback(Object* receiver, - Object* structure, - uint32_t index, - Object* holder) { - Isolate* isolate = GetIsolate(); +MaybeHandle<Object> JSObject::GetElementWithCallback( + Handle<JSObject> object, + Handle<Object> receiver, + Handle<Object> structure, + uint32_t index, + Handle<Object> holder) { + Isolate* isolate = object->GetIsolate(); ASSERT(!structure->IsForeign()); - // api style callbacks. if (structure->IsExecutableAccessorInfo()) { - Handle<ExecutableAccessorInfo> data( - ExecutableAccessorInfo::cast(structure)); + Handle<ExecutableAccessorInfo> data = + Handle<ExecutableAccessorInfo>::cast(structure); Object* fun_obj = data->getter(); v8::AccessorGetterCallback call_fun = v8::ToCData<v8::AccessorGetterCallback>(fun_obj); - if (call_fun == NULL) return isolate->heap()->undefined_value(); - HandleScope scope(isolate); - Handle<JSObject> self(JSObject::cast(receiver)); - Handle<JSObject> holder_handle(JSObject::cast(holder)); + if (call_fun == NULL) return isolate->factory()->undefined_value(); + Handle<JSObject> holder_handle = Handle<JSObject>::cast(holder); Handle<Object> number = isolate->factory()->NewNumberFromUint(index); Handle<String> key = isolate->factory()->NumberToString(number); - LOG(isolate, ApiNamedPropertyAccess("load", *self, *key)); + LOG(isolate, ApiNamedPropertyAccess("load", *holder_handle, *key)); PropertyCallbackArguments - args(isolate, data->data(), *self, *holder_handle); + args(isolate, data->data(), *receiver, *holder_handle); v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key)); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); - if (result.IsEmpty()) return isolate->heap()->undefined_value(); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); + if (result.IsEmpty()) return isolate->factory()->undefined_value(); Handle<Object> result_internal = v8::Utils::OpenHandle(*result); result_internal->VerifyApiCallResultType(); - return *result_internal; + // Rebox handle before return. + return handle(*result_internal, isolate); } // __defineGetter__ callback if (structure->IsAccessorPair()) { - Object* getter = AccessorPair::cast(structure)->getter(); + Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(), + isolate); if (getter->IsSpecFunction()) { // TODO(rossberg): nicer would be to cast to some JSCallable here... - return GetPropertyWithDefinedGetter(receiver, JSReceiver::cast(getter)); + return GetPropertyWithDefinedGetter( + object, receiver, Handle<JSReceiver>::cast(getter)); } // Getter is not a function. 
- return isolate->heap()->undefined_value(); + return isolate->factory()->undefined_value(); } if (structure->IsDeclaredAccessorInfo()) { - return GetDeclaredAccessorProperty(receiver, - DeclaredAccessorInfo::cast(structure), - isolate); + return GetDeclaredAccessorProperty( + receiver, Handle<DeclaredAccessorInfo>::cast(structure), isolate); } UNREACHABLE(); - return NULL; + return MaybeHandle<Object>(); } -Handle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object, - Handle<Object> structure, - uint32_t index, - Handle<Object> value, - Handle<JSObject> holder, - StrictMode strict_mode) { +MaybeHandle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object, + Handle<Object> structure, + uint32_t index, + Handle<Object> value, + Handle<JSObject> holder, + StrictMode strict_mode) { Isolate* isolate = object->GetIsolate(); // We should never get here to initialize a const with the hole // value since a const declaration would conflict with the setter. ASSERT(!value->IsTheHole()); - - // To accommodate both the old and the new api we switch on the - // data structure used to store the callbacks. Eventually foreign - // callbacks should be phased out. ASSERT(!structure->IsForeign()); - if (structure->IsExecutableAccessorInfo()) { // api style callbacks Handle<ExecutableAccessorInfo> data = @@ -11959,7 +12638,7 @@ Handle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object, args.Call(call_fun, v8::Utils::ToLocal(key), v8::Utils::ToLocal(value)); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return value; } @@ -11975,8 +12654,7 @@ Handle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object, Handle<Object> args[2] = { key, holder }; Handle<Object> error = isolate->factory()->NewTypeError( "no_setter_in_callback", HandleVector(args, 2)); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } } @@ -11984,7 +12662,7 @@ Handle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object, if (structure->IsDeclaredAccessorInfo()) return value; UNREACHABLE(); - return Handle<Object>(); + return MaybeHandle<Object>(); } @@ -12015,11 +12693,11 @@ bool JSObject::HasDictionaryArgumentsElements() { // Adding n elements in fast case is O(n*n). // Note: revisit design to have dual undefined values to capture absent // elements. 
-Handle<Object> JSObject::SetFastElement(Handle<JSObject> object, - uint32_t index, - Handle<Object> value, - StrictMode strict_mode, - bool check_prototype) { +MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object, + uint32_t index, + Handle<Object> value, + StrictMode strict_mode, + bool check_prototype) { ASSERT(object->HasFastSmiOrObjectElements() || object->HasFastArgumentsElements()); @@ -12046,7 +12724,7 @@ Handle<Object> JSObject::SetFastElement(Handle<JSObject> object, if (check_prototype && (index >= capacity || backing_store->get(index)->IsTheHole())) { bool found; - Handle<Object> result = SetElementWithCallbackSetterInPrototypes( + MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes( object, index, value, &found, strict_mode); if (found) return result; } @@ -12105,7 +12783,7 @@ Handle<Object> JSObject::SetFastElement(Handle<JSObject> object, SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length); FixedDoubleArray::cast(object->elements())->set(index, value->Number()); - object->ValidateElements(); + JSObject::ValidateElements(object); return value; } // Change elements kind from Smi-only to generic FAST if necessary. @@ -12129,7 +12807,7 @@ Handle<Object> JSObject::SetFastElement(Handle<JSObject> object, SetFastElementsCapacityAndLength(object, new_capacity, array_length, smi_mode); new_elements->set(index, *value); - object->ValidateElements(); + JSObject::ValidateElements(object); return value; } @@ -12143,13 +12821,14 @@ Handle<Object> JSObject::SetFastElement(Handle<JSObject> object, } -Handle<Object> JSObject::SetDictionaryElement(Handle<JSObject> object, - uint32_t index, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode, - bool check_prototype, - SetPropertyMode set_mode) { +MaybeHandle<Object> JSObject::SetDictionaryElement( + Handle<JSObject> object, + uint32_t index, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode, + bool check_prototype, + SetPropertyMode set_mode) { ASSERT(object->HasDictionaryElements() || object->HasDictionaryArgumentsElements()); Isolate* isolate = object->GetIsolate(); @@ -12187,8 +12866,7 @@ Handle<Object> JSObject::SetDictionaryElement(Handle<JSObject> object, Handle<Object> error = isolate->factory()->NewTypeError("strict_read_only_property", HandleVector(args, 2)); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } } // Elements of the arguments object in slow mode might be slow aliases. @@ -12209,8 +12887,8 @@ Handle<Object> JSObject::SetDictionaryElement(Handle<JSObject> object, // Can cause GC! 
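When SetFastElement outgrows the backing store it computes a new_capacity with headroom and copies into the larger store, rather than growing by exactly one slot. Geometric growth is what keeps repeated appends amortized linear instead of the O(n*n) behaviour the earlier comment warns about; the growth rule below is an illustration, not necessarily V8's exact formula:

#include <cstdio>
#include <vector>

// Growing by a constant amount on every add costs O(n^2) copies overall;
// growing geometrically keeps the total number of copies linear in n.
size_t NewCapacity(size_t old_capacity) {
  return old_capacity + old_capacity / 2 + 16;
}

int main() {
  size_t capacity = 0, copies = 0;
  std::vector<int> elements;
  for (int i = 0; i < 100000; ++i) {
    if (elements.size() == capacity) {
      capacity = NewCapacity(capacity);
      copies += elements.size();        // cost of moving into the new store
      elements.reserve(capacity);
    }
    elements.push_back(i);
  }
  std::printf("elements=%zu copies=%zu\n", elements.size(), copies);
}

With a growth factor like this, the total copy count works out to a small constant per element on average.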
if (check_prototype) { bool found; - Handle<Object> result = SetElementWithCallbackSetterInPrototypes(object, - index, value, &found, strict_mode); + MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes( + object, index, value, &found, strict_mode); if (found) return result; } @@ -12226,8 +12904,7 @@ Handle<Object> JSObject::SetDictionaryElement(Handle<JSObject> object, Handle<Object> error = isolate->factory()->NewTypeError("object_not_extensible", HandleVector(args, 1)); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } } @@ -12275,7 +12952,7 @@ Handle<Object> JSObject::SetDictionaryElement(Handle<JSObject> object, SetFastElementsCapacityAndLength(object, new_length, new_length, smi_mode); } - object->ValidateElements(); + JSObject::ValidateElements(object); #ifdef DEBUG if (FLAG_trace_normalization) { PrintF("Object elements are fast case again:\n"); @@ -12286,7 +12963,7 @@ Handle<Object> JSObject::SetDictionaryElement(Handle<JSObject> object, return value; } -Handle<Object> JSObject::SetFastDoubleElement( +MaybeHandle<Object> JSObject::SetFastDoubleElement( Handle<JSObject> object, uint32_t index, Handle<Object> value, @@ -12303,8 +12980,8 @@ Handle<Object> JSObject::SetFastDoubleElement( (index >= elms_length || Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) { bool found; - Handle<Object> result = SetElementWithCallbackSetterInPrototypes(object, - index, value, &found, strict_mode); + MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes( + object, index, value, &found, strict_mode); if (found) return result; } @@ -12323,11 +13000,12 @@ Handle<Object> JSObject::SetFastDoubleElement( if (!value->IsNumber()) { SetFastElementsCapacityAndLength(object, elms_length, length, kDontAllowSmiElements); - Handle<Object> result = SetFastElement(object, index, value, strict_mode, - check_prototype); - RETURN_IF_EMPTY_HANDLE_VALUE(object->GetIsolate(), result, - Handle<Object>()); - object->ValidateElements(); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + object->GetIsolate(), result, + SetFastElement(object, index, value, strict_mode, check_prototype), + Object); + JSObject::ValidateElements(object); return result; } @@ -12367,7 +13045,7 @@ Handle<Object> JSObject::SetFastDoubleElement( ASSERT(static_cast<uint32_t>(new_capacity) > index); SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1); FixedDoubleArray::cast(object->elements())->set(index, double_value); - object->ValidateElements(); + JSObject::ValidateElements(object); return value; } } @@ -12384,11 +13062,11 @@ Handle<Object> JSObject::SetFastDoubleElement( } -Handle<Object> JSReceiver::SetElement(Handle<JSReceiver> object, - uint32_t index, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode) { +MaybeHandle<Object> JSReceiver::SetElement(Handle<JSReceiver> object, + uint32_t index, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode) { if (object->IsJSProxy()) { return JSProxy::SetElementWithHandler( Handle<JSProxy>::cast(object), object, index, value, strict_mode); @@ -12398,40 +13076,38 @@ Handle<Object> JSReceiver::SetElement(Handle<JSReceiver> object, } -Handle<Object> JSObject::SetOwnElement(Handle<JSObject> object, - uint32_t index, - Handle<Object> value, - StrictMode strict_mode) { +MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object, + uint32_t index, + Handle<Object> value, + StrictMode strict_mode) { 
ASSERT(!object->HasExternalArrayElements()); return JSObject::SetElement(object, index, value, NONE, strict_mode, false); } -Handle<Object> JSObject::SetElement(Handle<JSObject> object, - uint32_t index, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode, - bool check_prototype, - SetPropertyMode set_mode) { +MaybeHandle<Object> JSObject::SetElement(Handle<JSObject> object, + uint32_t index, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode, + bool check_prototype, + SetPropertyMode set_mode) { Isolate* isolate = object->GetIsolate(); if (object->HasExternalArrayElements() || object->HasFixedTypedArrayElements()) { if (!value->IsNumber() && !value->IsUndefined()) { - bool has_exception; - Handle<Object> number = - Execution::ToNumber(isolate, value, &has_exception); - if (has_exception) return Handle<Object>(); - value = number; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, value, + Execution::ToNumber(isolate, value), Object); } } // Check access rights if needed. if (object->IsAccessCheckNeeded()) { - if (!isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_SET)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_SET); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_SET)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return value; } } @@ -12454,8 +13130,7 @@ Handle<Object> JSObject::SetElement(Handle<JSObject> object, Handle<Object> args[] = { object, number }; Handle<Object> error = isolate->factory()->NewTypeError( "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args))); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } // Normalize the elements to enable attributes on the property. @@ -12467,13 +13142,10 @@ Handle<Object> JSObject::SetElement(Handle<JSObject> object, if (!object->map()->is_observed()) { return object->HasIndexedInterceptor() - ? SetElementWithInterceptor(object, index, value, attributes, strict_mode, - check_prototype, - set_mode) + ? SetElementWithInterceptor(object, index, value, attributes, + strict_mode, check_prototype, set_mode) : SetElementWithoutInterceptor(object, index, value, attributes, - strict_mode, - check_prototype, - set_mode); + strict_mode, check_prototype, set_mode); } PropertyAttributes old_attributes = @@ -12483,8 +13155,8 @@ Handle<Object> JSObject::SetElement(Handle<JSObject> object, Handle<Object> new_length_handle; if (old_attributes != ABSENT) { - if (object->GetLocalElementAccessorPair(index) == NULL) { - old_value = Object::GetElementNoExceptionThrown(isolate, object, index); + if (GetLocalElementAccessorPair(object, index).is_null()) { + old_value = Object::GetElement(isolate, object, index).ToHandleChecked(); } } else if (object->IsJSArray()) { // Store old array length in case adding an element grows the array. @@ -12493,15 +13165,17 @@ Handle<Object> JSObject::SetElement(Handle<JSObject> object, } // Check for lookup interceptor - Handle<Object> result = object->HasIndexedInterceptor() - ? 
SetElementWithInterceptor(object, index, value, attributes, strict_mode, - check_prototype, - set_mode) - : SetElementWithoutInterceptor(object, index, value, attributes, - strict_mode, - check_prototype, - set_mode); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>()); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + object->HasIndexedInterceptor() + ? SetElementWithInterceptor( + object, index, value, attributes, + strict_mode, check_prototype, set_mode) + : SetElementWithoutInterceptor( + object, index, value, attributes, + strict_mode, check_prototype, set_mode), + Object); Handle<String> name = isolate->factory()->Uint32ToString(index); PropertyAttributes new_attributes = GetLocalElementAttribute(object, index); @@ -12531,7 +13205,7 @@ Handle<Object> JSObject::SetElement(Handle<JSObject> object, EnqueueChangeRecord(object, "reconfigure", name, old_value); } else { Handle<Object> new_value = - Object::GetElementNoExceptionThrown(isolate, object, index); + Object::GetElement(isolate, object, index).ToHandleChecked(); bool value_changed = !old_value->SameValue(*new_value); if (old_attributes != new_attributes) { if (!value_changed) old_value = isolate->factory()->the_hole_value(); @@ -12545,7 +13219,7 @@ Handle<Object> JSObject::SetElement(Handle<JSObject> object, } -Handle<Object> JSObject::SetElementWithoutInterceptor( +MaybeHandle<Object> JSObject::SetElementWithoutInterceptor( Handle<JSObject> object, uint32_t index, Handle<Object> value, @@ -12559,12 +13233,12 @@ Handle<Object> JSObject::SetElementWithoutInterceptor( Isolate* isolate = object->GetIsolate(); if (FLAG_trace_external_array_abuse && IsExternalArrayElementsKind(object->GetElementsKind())) { - CheckArrayAbuse(*object, "external elements write", index); + CheckArrayAbuse(object, "external elements write", index); } if (FLAG_trace_js_array_abuse && !IsExternalArrayElementsKind(object->GetElementsKind())) { if (object->IsJSArray()) { - CheckArrayAbuse(*object, "elements write", index, true); + CheckArrayAbuse(object, "elements write", index, true); } } switch (object->GetElementsKind()) { @@ -12744,30 +13418,9 @@ void JSObject::UpdateAllocationSite(Handle<JSObject> object, Handle<AllocationSite> site; { DisallowHeapAllocation no_allocation; - // Check if there is potentially a memento behind the object. If - // the last word of the momento is on another page we return - // immediatelly. - Address object_address = object->address(); - Address memento_address = object_address + JSArray::kSize; - Address last_memento_word_address = memento_address + kPointerSize; - if (!NewSpacePage::OnSamePage(object_address, - last_memento_word_address)) { - return; - } - - // Either object is the last object in the new space, or there is another - // object of at least word size (the header map word) following it, so - // suffices to compare ptr and top here. 
- Address top = heap->NewSpaceTop(); - ASSERT(memento_address == top || - memento_address + HeapObject::kHeaderSize <= top); - if (memento_address == top) return; - - HeapObject* candidate = HeapObject::FromAddress(memento_address); - if (candidate->map() != heap->allocation_memento_map()) return; - AllocationMemento* memento = AllocationMemento::cast(candidate); - if (!memento->IsValid()) return; + AllocationMemento* memento = heap->FindAllocationMemento(*object); + if (memento == NULL) return; // Walk through to the Allocation Site site = handle(memento->GetAllocationSite()); @@ -12826,7 +13479,7 @@ void JSObject::TransitionElementsKind(Handle<JSObject> object, if (IsFastSmiElementsKind(from_kind) && IsFastDoubleElementsKind(to_kind)) { SetFastDoubleElementsCapacityAndLength(object, capacity, length); - object->ValidateElements(); + JSObject::ValidateElements(object); return; } @@ -12834,7 +13487,7 @@ void JSObject::TransitionElementsKind(Handle<JSObject> object, IsFastObjectElementsKind(to_kind)) { SetFastElementsCapacityAndLength(object, capacity, length, kDontAllowSmiElements); - object->ValidateElements(); + JSObject::ValidateElements(object); return; } @@ -12861,32 +13514,22 @@ bool Map::IsValidElementsTransition(ElementsKind from_kind, void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array, uint32_t index, Handle<Object> value) { - CALL_HEAP_FUNCTION_VOID(array->GetIsolate(), - array->JSArrayUpdateLengthFromIndex(index, *value)); -} - - -MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index, - Object* value) { uint32_t old_len = 0; - CHECK(length()->ToArrayIndex(&old_len)); + CHECK(array->length()->ToArrayIndex(&old_len)); // Check to see if we need to update the length. For now, we make // sure that the length stays within 32-bits (unsigned). if (index >= old_len && index != 0xffffffff) { - Object* len; - { MaybeObject* maybe_len = - GetHeap()->NumberFromDouble(static_cast<double>(index) + 1); - if (!maybe_len->ToObject(&len)) return maybe_len; - } - set_length(len); + Handle<Object> len = array->GetIsolate()->factory()->NewNumber( + static_cast<double>(index) + 1); + array->set_length(*len); } - return value; } -Handle<Object> JSObject::GetElementWithInterceptor(Handle<JSObject> object, - Handle<Object> receiver, - uint32_t index) { +MaybeHandle<Object> JSObject::GetElementWithInterceptor( + Handle<JSObject> object, + Handle<Object> receiver, + uint32_t index) { Isolate* isolate = object->GetIsolate(); // Make sure that the top context does not change when doing @@ -12902,18 +13545,20 @@ Handle<Object> JSObject::GetElementWithInterceptor(Handle<JSObject> object, PropertyCallbackArguments args(isolate, interceptor->data(), *receiver, *object); v8::Handle<v8::Value> result = args.Call(getter, index); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); if (!result.IsEmpty()) { Handle<Object> result_internal = v8::Utils::OpenHandle(*result); result_internal->VerifyApiCallResultType(); // Rebox handle before return. 
- return Handle<Object>(*result_internal, isolate); + return handle(*result_internal, isolate); } } ElementsAccessor* handler = object->GetElementsAccessor(); - Handle<Object> result = handler->Get(receiver, object, index); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>()); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, handler->Get(receiver, object, index), + Object); if (!result->IsTheHole()) return result; Handle<Object> proto(object->GetPrototype(), isolate); @@ -13108,12 +13753,12 @@ bool JSObject::ShouldConvertToFastDoubleElements( // together, so even though this function belongs in objects-debug.cc, // we keep it here instead to satisfy certain compilers. #ifdef OBJECT_PRINT -template<typename Shape, typename Key> -void Dictionary<Shape, Key>::Print(FILE* out) { - int capacity = HashTable<Shape, Key>::Capacity(); +template<typename Derived, typename Shape, typename Key> +void Dictionary<Derived, Shape, Key>::Print(FILE* out) { + int capacity = DerivedHashTable::Capacity(); for (int i = 0; i < capacity; i++) { - Object* k = HashTable<Shape, Key>::KeyAt(i); - if (HashTable<Shape, Key>::IsKey(k)) { + Object* k = DerivedHashTable::KeyAt(i); + if (DerivedHashTable::IsKey(k)) { PrintF(out, " "); if (k->IsString()) { String::cast(k)->StringPrint(out); @@ -13129,15 +13774,15 @@ void Dictionary<Shape, Key>::Print(FILE* out) { #endif -template<typename Shape, typename Key> -void Dictionary<Shape, Key>::CopyValuesTo(FixedArray* elements) { +template<typename Derived, typename Shape, typename Key> +void Dictionary<Derived, Shape, Key>::CopyValuesTo(FixedArray* elements) { int pos = 0; - int capacity = HashTable<Shape, Key>::Capacity(); + int capacity = DerivedHashTable::Capacity(); DisallowHeapAllocation no_gc; WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc); for (int i = 0; i < capacity; i++) { - Object* k = Dictionary<Shape, Key>::KeyAt(i); - if (Dictionary<Shape, Key>::IsKey(k)) { + Object* k = Dictionary::KeyAt(i); + if (Dictionary::IsKey(k)) { elements->set(pos++, ValueAt(i), mode); } } @@ -13165,7 +13810,7 @@ InterceptorInfo* JSObject::GetIndexedInterceptor() { } -Handle<Object> JSObject::GetPropertyPostInterceptor( +MaybeHandle<Object> JSObject::GetPropertyPostInterceptor( Handle<JSObject> object, Handle<Object> receiver, Handle<Name> name, @@ -13173,36 +13818,20 @@ Handle<Object> JSObject::GetPropertyPostInterceptor( // Check local property in holder, ignore interceptor. Isolate* isolate = object->GetIsolate(); LookupResult lookup(isolate); - object->LocalLookupRealNamedProperty(*name, &lookup); - Handle<Object> result; + object->LocalLookupRealNamedProperty(name, &lookup); if (lookup.IsFound()) { - result = GetProperty(object, receiver, &lookup, name, attributes); + return GetProperty(object, receiver, &lookup, name, attributes); } else { // Continue searching via the prototype chain. Handle<Object> prototype(object->GetPrototype(), isolate); *attributes = ABSENT; if (prototype->IsNull()) return isolate->factory()->undefined_value(); - result = GetPropertyWithReceiver(prototype, receiver, name, attributes); - } - return result; -} - - -MaybeObject* JSObject::GetLocalPropertyPostInterceptor( - Object* receiver, - Name* name, - PropertyAttributes* attributes) { - // Check local property in holder, ignore interceptor. 
- LookupResult result(GetIsolate()); - LocalLookupRealNamedProperty(name, &result); - if (result.IsFound()) { - return GetProperty(receiver, &result, name, attributes); + return GetPropertyWithReceiver(prototype, receiver, name, attributes); } - return GetHeap()->undefined_value(); } -Handle<Object> JSObject::GetPropertyWithInterceptor( +MaybeHandle<Object> JSObject::GetPropertyWithInterceptor( Handle<JSObject> object, Handle<Object> receiver, Handle<Name> name, @@ -13224,12 +13853,12 @@ Handle<Object> JSObject::GetPropertyWithInterceptor( args(isolate, interceptor->data(), *receiver, *object); v8::Handle<v8::Value> result = args.Call(getter, v8::Utils::ToLocal(name_string)); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); if (!result.IsEmpty()) { *attributes = NONE; Handle<Object> result_internal = v8::Utils::OpenHandle(*result); result_internal->VerifyApiCallResultType(); - // Rebox handle to escape this scope. + // Rebox handle before return. return handle(*result_internal, isolate); } } @@ -13238,20 +13867,72 @@ Handle<Object> JSObject::GetPropertyWithInterceptor( } +// Compute the property keys from the interceptor. +// TODO(rossberg): support symbols in API, and filter here if needed. +MaybeHandle<JSObject> JSObject::GetKeysForNamedInterceptor( + Handle<JSObject> object, Handle<JSReceiver> receiver) { + Isolate* isolate = receiver->GetIsolate(); + Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor()); + PropertyCallbackArguments + args(isolate, interceptor->data(), *receiver, *object); + v8::Handle<v8::Object> result; + if (!interceptor->enumerator()->IsUndefined()) { + v8::NamedPropertyEnumeratorCallback enum_fun = + v8::ToCData<v8::NamedPropertyEnumeratorCallback>( + interceptor->enumerator()); + LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object)); + result = args.Call(enum_fun); + } + if (result.IsEmpty()) return MaybeHandle<JSObject>(); +#if ENABLE_EXTRA_CHECKS + CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() || + v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements()); +#endif + // Rebox before returning. + return handle(*v8::Utils::OpenHandle(*result), isolate); +} + + +// Compute the element keys from the interceptor. +MaybeHandle<JSObject> JSObject::GetKeysForIndexedInterceptor( + Handle<JSObject> object, Handle<JSReceiver> receiver) { + Isolate* isolate = receiver->GetIsolate(); + Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor()); + PropertyCallbackArguments + args(isolate, interceptor->data(), *receiver, *object); + v8::Handle<v8::Object> result; + if (!interceptor->enumerator()->IsUndefined()) { + v8::IndexedPropertyEnumeratorCallback enum_fun = + v8::ToCData<v8::IndexedPropertyEnumeratorCallback>( + interceptor->enumerator()); + LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object)); + result = args.Call(enum_fun); + } + if (result.IsEmpty()) return MaybeHandle<JSObject>(); +#if ENABLE_EXTRA_CHECKS + CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() || + v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements()); +#endif + // Rebox before returning. + return handle(*v8::Utils::OpenHandle(*result), isolate); +} + + bool JSObject::HasRealNamedProperty(Handle<JSObject> object, Handle<Name> key) { Isolate* isolate = object->GetIsolate(); SealHandleScope shs(isolate); // Check access rights if needed. 
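GetPropertyWithInterceptor and the two GetKeysFor*Interceptor helpers above all follow one convention: the embedder-supplied callback runs first, and an empty result means "not handled", after which the ordinary lookup proceeds. A compact model of that convention, with std::optional standing in for the possibly-empty v8::Handle and all names invented:

#include <functional>
#include <iostream>
#include <map>
#include <optional>
#include <string>

// The interceptor gets the first chance to produce a value; an empty result
// falls through to the regular property lookup.
using Interceptor = std::function<std::optional<int>(const std::string&)>;

std::optional<int> GetProperty(const std::map<std::string, int>& properties,
                               const Interceptor& interceptor,
                               const std::string& name) {
  if (interceptor) {
    if (std::optional<int> result = interceptor(name)) return result;
  }
  auto it = properties.find(name);
  if (it == properties.end()) return std::nullopt;
  return it->second;
}

int main() {
  std::map<std::string, int> props = {{"x", 1}};
  Interceptor magic = [](const std::string& name) -> std::optional<int> {
    if (name == "answer") return 42;   // intercepted
    return std::nullopt;               // not handled, use the real property
  };
  std::cout << *GetProperty(props, magic, "answer") << " "
            << *GetProperty(props, magic, "x") << "\n";  // 42 1
}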
if (object->IsAccessCheckNeeded()) { - if (!isolate->MayNamedAccessWrapper(object, key, v8::ACCESS_HAS)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS); + if (!isolate->MayNamedAccess(object, key, v8::ACCESS_HAS)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS); + // TODO(yangguo): Issue 3269, check for scheduled exception missing? return false; } } LookupResult result(isolate); - object->LocalLookupRealNamedProperty(*key, &result); + object->LocalLookupRealNamedProperty(key, &result); return result.IsFound() && !result.IsInterceptor(); } @@ -13261,8 +13942,9 @@ bool JSObject::HasRealElementProperty(Handle<JSObject> object, uint32_t index) { HandleScope scope(isolate); // Check access rights if needed. if (object->IsAccessCheckNeeded()) { - if (!isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_HAS)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS); + if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS); + // TODO(yangguo): Issue 3269, check for scheduled exception missing? return false; } } @@ -13286,14 +13968,15 @@ bool JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object, SealHandleScope shs(isolate); // Check access rights if needed. if (object->IsAccessCheckNeeded()) { - if (!isolate->MayNamedAccessWrapper(object, key, v8::ACCESS_HAS)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS); + if (!isolate->MayNamedAccess(object, key, v8::ACCESS_HAS)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS); + // TODO(yangguo): Issue 3269, check for scheduled exception missing? return false; } } LookupResult result(isolate); - object->LocalLookupRealNamedProperty(*key, &result); + object->LocalLookupRealNamedProperty(key, &result); return result.IsPropertyCallbacks(); } @@ -13630,8 +14313,8 @@ class StringKey : public HashTableKey { // StringSharedKeys are used as keys in the eval cache. class StringSharedKey : public HashTableKey { public: - StringSharedKey(String* source, - SharedFunctionInfo* shared, + StringSharedKey(Handle<String> source, + Handle<SharedFunctionInfo> shared, StrictMode strict_mode, int scope_position) : source_(source), @@ -13639,11 +14322,12 @@ class StringSharedKey : public HashTableKey { strict_mode_(strict_mode), scope_position_(scope_position) { } - bool IsMatch(Object* other) { + bool IsMatch(Object* other) V8_OVERRIDE { + DisallowHeapAllocation no_allocation; if (!other->IsFixedArray()) return false; FixedArray* other_array = FixedArray::cast(other); SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0)); - if (shared != shared_) return false; + if (shared != *shared_) return false; int strict_unchecked = Smi::cast(other_array->get(2))->value(); ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT); StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked); @@ -13651,7 +14335,7 @@ class StringSharedKey : public HashTableKey { int scope_position = Smi::cast(other_array->get(3))->value(); if (scope_position != scope_position_) return false; String* source = String::cast(other_array->get(1)); - return source->Equals(source_); + return source->Equals(*source_); } static uint32_t StringSharedHashHelper(String* source, @@ -13665,7 +14349,7 @@ class StringSharedKey : public HashTableKey { // script source code and the start position of the calling scope. // We do this to ensure that the cache entries can survive garbage // collection. 
- Script* script = Script::cast(shared->script()); + Script* script(Script::cast(shared->script())); hash ^= String::cast(script->source())->Hash(); if (strict_mode == STRICT) hash ^= 0x8000; hash += scope_position; @@ -13673,12 +14357,13 @@ class StringSharedKey : public HashTableKey { return hash; } - uint32_t Hash() { - return StringSharedHashHelper( - source_, shared_, strict_mode_, scope_position_); + uint32_t Hash() V8_OVERRIDE { + return StringSharedHashHelper(*source_, *shared_, strict_mode_, + scope_position_); } - uint32_t HashForObject(Object* obj) { + uint32_t HashForObject(Object* obj) V8_OVERRIDE { + DisallowHeapAllocation no_allocation; FixedArray* other_array = FixedArray::cast(obj); SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0)); String* source = String::cast(other_array->get(1)); @@ -13690,22 +14375,19 @@ class StringSharedKey : public HashTableKey { source, shared, strict_mode, scope_position); } - MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) { - Object* obj; - { MaybeObject* maybe_obj = heap->AllocateFixedArray(4); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - FixedArray* other_array = FixedArray::cast(obj); - other_array->set(0, shared_); - other_array->set(1, source_); - other_array->set(2, Smi::FromInt(strict_mode_)); - other_array->set(3, Smi::FromInt(scope_position_)); - return other_array; + + Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { + Handle<FixedArray> array = isolate->factory()->NewFixedArray(4); + array->set(0, *shared_); + array->set(1, *source_); + array->set(2, Smi::FromInt(strict_mode_)); + array->set(3, Smi::FromInt(scope_position_)); + return array; } private: - String* source_; - SharedFunctionInfo* shared_; + Handle<String> source_; + Handle<SharedFunctionInfo> shared_; StrictMode strict_mode_; int scope_position_; }; @@ -13714,7 +14396,7 @@ class StringSharedKey : public HashTableKey { // RegExpKey carries the source and flags of a regular expression as key. class RegExpKey : public HashTableKey { public: - RegExpKey(String* string, JSRegExp::Flags flags) + RegExpKey(Handle<String> string, JSRegExp::Flags flags) : string_(string), flags_(Smi::FromInt(flags.value())) { } @@ -13722,22 +14404,22 @@ class RegExpKey : public HashTableKey { // stored value is stored where the key should be. IsMatch then // compares the search key to the found object, rather than comparing // a key to a key. - bool IsMatch(Object* obj) { + bool IsMatch(Object* obj) V8_OVERRIDE { FixedArray* val = FixedArray::cast(obj); return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex))) && (flags_ == val->get(JSRegExp::kFlagsIndex)); } - uint32_t Hash() { return RegExpHash(string_, flags_); } + uint32_t Hash() V8_OVERRIDE { return RegExpHash(*string_, flags_); } - Object* AsObject(Heap* heap) { + Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { // Plain hash maps, which is where regexp keys are used, don't // use this function. 
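StringSharedKey keys the compilation cache on more than the source text: the calling function, the language mode and the scope position all feed the hash and the equality check, so identical eval source at different call sites gets distinct entries. The same idea expressed over standard containers (the hash mixing here is only an illustration, not the helper above):

#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <unordered_map>

// A compilation-cache-style composite key: every field participates in both
// hashing and equality.
struct EvalCacheKey {
  std::string source;
  const void* shared;     // stands in for the calling function's identity
  bool strict;
  int scope_position;

  bool operator==(const EvalCacheKey& other) const {
    return source == other.source && shared == other.shared &&
           strict == other.strict && scope_position == other.scope_position;
  }
};

struct EvalCacheKeyHash {
  size_t operator()(const EvalCacheKey& key) const {
    size_t hash = std::hash<std::string>()(key.source);
    hash ^= std::hash<const void*>()(key.shared);
    if (key.strict) hash ^= 0x8000;
    hash += static_cast<size_t>(key.scope_position);
    return hash;
  }
};

int main() {
  std::unordered_map<EvalCacheKey, int, EvalCacheKeyHash> cache;
  int caller = 0;
  cache[{"x + 1", &caller, false, 7}] = 42;
  std::cout << cache.count({"x + 1", &caller, false, 7}) << "\n";  // 1: hit
  std::cout << cache.count({"x + 1", &caller, true, 7}) << "\n";   // 0: miss
}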
UNREACHABLE(); - return NULL; + return MaybeHandle<Object>().ToHandleChecked(); } - uint32_t HashForObject(Object* obj) { + uint32_t HashForObject(Object* obj) V8_OVERRIDE { FixedArray* val = FixedArray::cast(obj); return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)), Smi::cast(val->get(JSRegExp::kFlagsIndex))); @@ -13747,20 +14429,20 @@ class RegExpKey : public HashTableKey { return string->Hash() + flags->value(); } - String* string_; + Handle<String> string_; Smi* flags_; }; -MaybeObject* OneByteStringKey::AsObject(Heap* heap) { +Handle<Object> OneByteStringKey::AsHandle(Isolate* isolate) { if (hash_field_ == 0) Hash(); - return heap->AllocateOneByteInternalizedString(string_, hash_field_); + return isolate->factory()->NewOneByteInternalizedString(string_, hash_field_); } -MaybeObject* TwoByteStringKey::AsObject(Heap* heap) { +Handle<Object> TwoByteStringKey::AsHandle(Isolate* isolate) { if (hash_field_ == 0) Hash(); - return heap->AllocateTwoByteInternalizedString(string_, hash_field_); + return isolate->factory()->NewTwoByteInternalizedString(string_, hash_field_); } @@ -13781,19 +14463,18 @@ const uint16_t* SubStringKey<uint16_t>::GetChars() { template<> -MaybeObject* SubStringKey<uint8_t>::AsObject(Heap* heap) { +Handle<Object> SubStringKey<uint8_t>::AsHandle(Isolate* isolate) { if (hash_field_ == 0) Hash(); Vector<const uint8_t> chars(GetChars() + from_, length_); - return heap->AllocateOneByteInternalizedString(chars, hash_field_); + return isolate->factory()->NewOneByteInternalizedString(chars, hash_field_); } template<> -MaybeObject* SubStringKey<uint16_t>::AsObject( - Heap* heap) { +Handle<Object> SubStringKey<uint16_t>::AsHandle(Isolate* isolate) { if (hash_field_ == 0) Hash(); Vector<const uint16_t> chars(GetChars() + from_, length_); - return heap->AllocateTwoByteInternalizedString(chars, hash_field_); + return isolate->factory()->NewTwoByteInternalizedString(chars, hash_field_); } @@ -13818,32 +14499,31 @@ template class SubStringKey<uint16_t>; // InternalizedStringKey carries a string/internalized-string object as key. class InternalizedStringKey : public HashTableKey { public: - explicit InternalizedStringKey(String* string) + explicit InternalizedStringKey(Handle<String> string) : string_(string) { } - bool IsMatch(Object* string) { - return String::cast(string)->Equals(string_); + virtual bool IsMatch(Object* string) V8_OVERRIDE { + return String::cast(string)->Equals(*string_); } - uint32_t Hash() { return string_->Hash(); } + virtual uint32_t Hash() V8_OVERRIDE { return string_->Hash(); } - uint32_t HashForObject(Object* other) { + virtual uint32_t HashForObject(Object* other) V8_OVERRIDE { return String::cast(other)->Hash(); } - MaybeObject* AsObject(Heap* heap) { - // Attempt to flatten the string, so that internalized strings will most - // often be flat strings. - string_ = string_->TryFlattenGetString(); + virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { // Internalize the string if possible. - Map* map = heap->InternalizedStringMapForString(string_); - if (map != NULL) { - string_->set_map_no_write_barrier(map); + MaybeHandle<Map> maybe_map = + isolate->factory()->InternalizedStringMapForString(string_); + Handle<Map> map; + if (maybe_map.ToHandle(&map)) { + string_->set_map_no_write_barrier(*map); ASSERT(string_->IsInternalizedString()); return string_; } // Otherwise allocate a new internalized string. 
- return heap->AllocateInternalizedStringImpl( + return isolate->factory()->NewInternalizedStringImpl( string_, string_->length(), string_->hash_field()); } @@ -13851,30 +14531,32 @@ class InternalizedStringKey : public HashTableKey { return String::cast(obj)->Hash(); } - String* string_; + Handle<String> string_; }; -template<typename Shape, typename Key> -void HashTable<Shape, Key>::IteratePrefix(ObjectVisitor* v) { +template<typename Derived, typename Shape, typename Key> +void HashTable<Derived, Shape, Key>::IteratePrefix(ObjectVisitor* v) { IteratePointers(v, 0, kElementsStartOffset); } -template<typename Shape, typename Key> -void HashTable<Shape, Key>::IterateElements(ObjectVisitor* v) { +template<typename Derived, typename Shape, typename Key> +void HashTable<Derived, Shape, Key>::IterateElements(ObjectVisitor* v) { IteratePointers(v, kElementsStartOffset, kHeaderSize + length() * kPointerSize); } -template<typename Shape, typename Key> -MaybeObject* HashTable<Shape, Key>::Allocate(Heap* heap, - int at_least_space_for, - MinimumCapacity capacity_option, - PretenureFlag pretenure) { - ASSERT(!capacity_option || IS_POWER_OF_TWO(at_least_space_for)); +template<typename Derived, typename Shape, typename Key> +Handle<Derived> HashTable<Derived, Shape, Key>::New( + Isolate* isolate, + int at_least_space_for, + MinimumCapacity capacity_option, + PretenureFlag pretenure) { + ASSERT(0 <= at_least_space_for); + ASSERT(!capacity_option || IsPowerOf2(at_least_space_for)); int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY) ? at_least_space_for : ComputeCapacity(at_least_space_for); @@ -13882,22 +14564,23 @@ MaybeObject* HashTable<Shape, Key>::Allocate(Heap* heap, v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true); } - Object* obj; - { MaybeObject* maybe_obj = - heap-> AllocateHashTable(EntryToIndex(capacity), pretenure); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - HashTable::cast(obj)->SetNumberOfElements(0); - HashTable::cast(obj)->SetNumberOfDeletedElements(0); - HashTable::cast(obj)->SetCapacity(capacity); - return obj; + Factory* factory = isolate->factory(); + int length = EntryToIndex(capacity); + Handle<FixedArray> array = factory->NewFixedArray(length, pretenure); + array->set_map_no_write_barrier(*factory->hash_table_map()); + Handle<Derived> table = Handle<Derived>::cast(array); + + table->SetNumberOfElements(0); + table->SetNumberOfDeletedElements(0); + table->SetCapacity(capacity); + return table; } // Find entry for key otherwise return kNotFound. -int NameDictionary::FindEntry(Name* key) { +int NameDictionary::FindEntry(Handle<Name> key) { if (!key->IsUniqueName()) { - return HashTable<NameDictionaryShape, Name*>::FindEntry(key); + return DerivedHashTable::FindEntry(key); } // Optimized for unique names. Knowledge of the key type allows: @@ -13918,24 +14601,26 @@ int NameDictionary::FindEntry(Name* key) { int index = EntryToIndex(entry); Object* element = get(index); if (element->IsUndefined()) break; // Empty entry. - if (key == element) return entry; + if (*key == element) return entry; if (!element->IsUniqueName() && !element->IsTheHole() && - Name::cast(element)->Equals(key)) { + Name::cast(element)->Equals(*key)) { // Replace a key that is a non-internalized string by the equivalent // internalized string for faster further lookups. 
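Both InternalizedStringKey and the dictionary lookup above lean on interning: equal strings are collapsed onto one canonical object so that later lookups can compare identities instead of characters, which is what the set(index, *key) replacement just below is for. A toy interning table over std::unordered_set (Internalize is an invented name):

#include <iostream>
#include <string>
#include <unordered_set>

// Map equal strings to one canonical copy, so future comparisons can be
// pointer comparisons.
const std::string* Internalize(std::unordered_set<std::string>& table,
                               const std::string& s) {
  return &*table.insert(s).first;  // existing canonical copy or a new one
}

int main() {
  std::unordered_set<std::string> table;
  const std::string* a = Internalize(table, "length");
  const std::string* b = Internalize(table, std::string("len") + "gth");
  std::cout << (a == b) << "\n";  // 1: one canonical object for both
}

Pointers to elements of an unordered_set remain valid across rehashing, so the canonical address can be handed out safely.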
- set(index, key); + set(index, *key); return entry; } - ASSERT(element->IsTheHole() || !Name::cast(element)->Equals(key)); + ASSERT(element->IsTheHole() || !Name::cast(element)->Equals(*key)); entry = NextProbe(entry, count++, capacity); } return kNotFound; } -template<typename Shape, typename Key> -MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) { +template<typename Derived, typename Shape, typename Key> +void HashTable<Derived, Shape, Key>::Rehash( + Handle<Derived> new_table, + Key key) { ASSERT(NumberOfElements() < new_table->Capacity()); DisallowHeapAllocation no_gc; @@ -13954,7 +14639,7 @@ MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) { uint32_t from_index = EntryToIndex(i); Object* k = get(from_index); if (IsKey(k)) { - uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k); + uint32_t hash = HashTable::HashForObject(key, k); uint32_t insertion_index = EntryToIndex(new_table->FindInsertionEntry(hash)); for (int j = 0; j < Shape::kEntrySize; j++) { @@ -13964,16 +14649,16 @@ MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) { } new_table->SetNumberOfElements(NumberOfElements()); new_table->SetNumberOfDeletedElements(0); - return new_table; } -template<typename Shape, typename Key> -uint32_t HashTable<Shape, Key>::EntryForProbe(Key key, - Object* k, - int probe, - uint32_t expected) { - uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k); +template<typename Derived, typename Shape, typename Key> +uint32_t HashTable<Derived, Shape, Key>::EntryForProbe( + Key key, + Object* k, + int probe, + uint32_t expected) { + uint32_t hash = HashTable::HashForObject(key, k); uint32_t capacity = Capacity(); uint32_t entry = FirstProbe(hash, capacity); for (int i = 1; i < probe; i++) { @@ -13984,10 +14669,10 @@ uint32_t HashTable<Shape, Key>::EntryForProbe(Key key, } -template<typename Shape, typename Key> -void HashTable<Shape, Key>::Swap(uint32_t entry1, - uint32_t entry2, - WriteBarrierMode mode) { +template<typename Derived, typename Shape, typename Key> +void HashTable<Derived, Shape, Key>::Swap(uint32_t entry1, + uint32_t entry2, + WriteBarrierMode mode) { int index1 = EntryToIndex(entry1); int index2 = EntryToIndex(entry2); Object* temp[Shape::kEntrySize]; @@ -14003,8 +14688,8 @@ void HashTable<Shape, Key>::Swap(uint32_t entry1, } -template<typename Shape, typename Key> -void HashTable<Shape, Key>::Rehash(Key key) { +template<typename Derived, typename Shape, typename Key> +void HashTable<Derived, Shape, Key>::Rehash(Key key) { DisallowHeapAllocation no_gc; WriteBarrierMode mode = GetWriteBarrierMode(no_gc); uint32_t capacity = Capacity(); @@ -14036,71 +14721,73 @@ void HashTable<Shape, Key>::Rehash(Key key) { } -template<typename Shape, typename Key> -MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n, - Key key, - PretenureFlag pretenure) { - int capacity = Capacity(); - int nof = NumberOfElements() + n; - int nod = NumberOfDeletedElements(); +template<typename Derived, typename Shape, typename Key> +Handle<Derived> HashTable<Derived, Shape, Key>::EnsureCapacity( + Handle<Derived> table, + int n, + Key key, + PretenureFlag pretenure) { + Isolate* isolate = table->GetIsolate(); + int capacity = table->Capacity(); + int nof = table->NumberOfElements() + n; + int nod = table->NumberOfDeletedElements(); // Return if: // 50% is still free after adding n elements and // at most 50% of the free elements are deleted elements. 
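FindEntry and EntryForProbe above walk the table with FirstProbe/NextProbe: the capacity is a power of two, the hash is masked by capacity - 1, and each collision advances by an increasing step. A self-contained open-addressed table showing the same kind of probe sequence (the class and method names are invented, and the empty string doubles as the empty-slot marker for brevity):

#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

// Open addressing with an increasing probe step, everything masked by the
// power-of-two capacity.
struct Table {
  std::vector<std::string> keys;  // "" marks an empty slot

  explicit Table(uint32_t capacity) : keys(capacity) {}

  uint32_t Probe(uint32_t entry, uint32_t step) const {
    return static_cast<uint32_t>((entry + step) & (keys.size() - 1));
  }

  uint32_t FindInsertionEntry(uint32_t hash) const {
    uint32_t entry = static_cast<uint32_t>(hash & (keys.size() - 1));
    for (uint32_t step = 1; !keys[entry].empty(); ++step) {
      entry = Probe(entry, step);
    }
    return entry;
  }

  int FindEntry(const std::string& key, uint32_t hash) const {
    uint32_t entry = static_cast<uint32_t>(hash & (keys.size() - 1));
    for (uint32_t step = 1; !keys[entry].empty(); ++step) {
      if (keys[entry] == key) return static_cast<int>(entry);
      entry = Probe(entry, step);
    }
    return -1;  // kNotFound
  }
};

int main() {
  Table table(8);
  uint32_t hash = 5;  // pretend both keys collide on the same hash
  table.keys[table.FindInsertionEntry(hash)] = "foo";
  table.keys[table.FindInsertionEntry(hash)] = "bar";
  std::printf("%d %d %d\n", table.FindEntry("foo", hash),
              table.FindEntry("bar", hash), table.FindEntry("baz", hash));
}

The probe loops terminate only because a real table always keeps some slots free, which is exactly what the capacity policy in the next chunk guarantees.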
if (nod <= (capacity - nof) >> 1) { int needed_free = nof >> 1; - if (nof + needed_free <= capacity) return this; + if (nof + needed_free <= capacity) return table; } const int kMinCapacityForPretenure = 256; bool should_pretenure = pretenure == TENURED || - ((capacity > kMinCapacityForPretenure) && !GetHeap()->InNewSpace(this)); - Object* obj; - { MaybeObject* maybe_obj = - Allocate(GetHeap(), - nof * 2, - USE_DEFAULT_MINIMUM_CAPACITY, - should_pretenure ? TENURED : NOT_TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } + ((capacity > kMinCapacityForPretenure) && + !isolate->heap()->InNewSpace(*table)); + Handle<Derived> new_table = HashTable::New( + isolate, + nof * 2, + USE_DEFAULT_MINIMUM_CAPACITY, + should_pretenure ? TENURED : NOT_TENURED); - return Rehash(HashTable::cast(obj), key); + table->Rehash(new_table, key); + return new_table; } -template<typename Shape, typename Key> -MaybeObject* HashTable<Shape, Key>::Shrink(Key key) { - int capacity = Capacity(); - int nof = NumberOfElements(); +template<typename Derived, typename Shape, typename Key> +Handle<Derived> HashTable<Derived, Shape, Key>::Shrink(Handle<Derived> table, + Key key) { + int capacity = table->Capacity(); + int nof = table->NumberOfElements(); // Shrink to fit the number of elements if only a quarter of the // capacity is filled with elements. - if (nof > (capacity >> 2)) return this; + if (nof > (capacity >> 2)) return table; // Allocate a new dictionary with room for at least the current // number of elements. The allocation method will make sure that // there is extra room in the dictionary for additions. Don't go // lower than room for 16 elements. int at_least_room_for = nof; - if (at_least_room_for < 16) return this; + if (at_least_room_for < 16) return table; + Isolate* isolate = table->GetIsolate(); const int kMinCapacityForPretenure = 256; bool pretenure = (at_least_room_for > kMinCapacityForPretenure) && - !GetHeap()->InNewSpace(this); - Object* obj; - { MaybeObject* maybe_obj = - Allocate(GetHeap(), - at_least_room_for, - USE_DEFAULT_MINIMUM_CAPACITY, - pretenure ? TENURED : NOT_TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } + !isolate->heap()->InNewSpace(*table); + Handle<Derived> new_table = HashTable::New( + isolate, + at_least_room_for, + USE_DEFAULT_MINIMUM_CAPACITY, + pretenure ? TENURED : NOT_TENURED); - return Rehash(HashTable::cast(obj), key); + table->Rehash(new_table, key); + return new_table; } -template<typename Shape, typename Key> -uint32_t HashTable<Shape, Key>::FindInsertionEntry(uint32_t hash) { +template<typename Derived, typename Shape, typename Key> +uint32_t HashTable<Derived, Shape, Key>::FindInsertionEntry(uint32_t hash) { uint32_t capacity = Capacity(); uint32_t entry = FirstProbe(hash, capacity); uint32_t count = 1; @@ -14117,221 +14804,238 @@ uint32_t HashTable<Shape, Key>::FindInsertionEntry(uint32_t hash) { // Force instantiation of template instances class. // Please note this list is compiler dependent. 
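The handlified EnsureCapacity and Shrink above keep the existing sizing policy and only change the interface: a table is rebuilt at roughly twice the new element count unless at least half of it would still be free after the insertion and at most half of that free space is deleted ("tombstone") slots, and it shrinks once it is no more than a quarter full, never below room for 16 elements. A minimal standalone sketch of just that policy follows; the names are illustrative and this is not V8 code.

#include <cassert>
#include <cstdio>

// Decide whether an open-addressed table must grow before holding
// nof_after_insert live elements. capacity = total slots, nod = deleted slots.
bool NeedsToGrow(int capacity, int nof_after_insert, int nod) {
  // Keep the current table if >= 50% is still free after the insertion and
  // at most 50% of that free space consists of deleted entries.
  if (nod <= (capacity - nof_after_insert) / 2) {
    int needed_free = nof_after_insert / 2;
    if (nof_after_insert + needed_free <= capacity) return false;
  }
  return true;
}

// Decide whether a table should shrink: only when it is at most a quarter
// full, and never below room for 16 elements.
bool ShouldShrink(int capacity, int nof) {
  return nof <= capacity / 4 && nof >= 16;
}

int main() {
  assert(!NeedsToGrow(32, 10, 2));   // plenty of clean free space
  assert(NeedsToGrow(32, 24, 0));    // less than half free after insert
  assert(NeedsToGrow(32, 10, 14));   // too many tombstones
  assert(ShouldShrink(256, 20));
  assert(!ShouldShrink(64, 40));
  std::puts("sizing policy checks passed");
}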
-template class HashTable<StringTableShape, HashTableKey*>; +template class HashTable<StringTable, StringTableShape, HashTableKey*>; -template class HashTable<CompilationCacheShape, HashTableKey*>; +template class HashTable<CompilationCacheTable, + CompilationCacheShape, + HashTableKey*>; -template class HashTable<MapCacheShape, HashTableKey*>; +template class HashTable<MapCache, MapCacheShape, HashTableKey*>; -template class HashTable<ObjectHashTableShape<1>, Object*>; +template class HashTable<ObjectHashTable, + ObjectHashTableShape, + Handle<Object> >; -template class HashTable<ObjectHashTableShape<2>, Object*>; +template class HashTable<WeakHashTable, WeakHashTableShape<2>, Handle<Object> >; -template class HashTable<WeakHashTableShape<2>, Object*>; +template class Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >; -template class Dictionary<NameDictionaryShape, Name*>; +template class Dictionary<SeededNumberDictionary, + SeededNumberDictionaryShape, + uint32_t>; -template class Dictionary<SeededNumberDictionaryShape, uint32_t>; +template class Dictionary<UnseededNumberDictionary, + UnseededNumberDictionaryShape, + uint32_t>; -template class Dictionary<UnseededNumberDictionaryShape, uint32_t>; +template Handle<SeededNumberDictionary> +Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: + New(Isolate*, int at_least_space_for, PretenureFlag pretenure); -template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>:: - Allocate(Heap* heap, int at_least_space_for, PretenureFlag pretenure); +template Handle<UnseededNumberDictionary> +Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>:: + New(Isolate*, int at_least_space_for, PretenureFlag pretenure); -template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>:: - Allocate(Heap* heap, int at_least_space_for, PretenureFlag pretenure); +template Handle<NameDictionary> +Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >:: + New(Isolate*, int n, PretenureFlag pretenure); -template MaybeObject* Dictionary<NameDictionaryShape, Name*>:: - Allocate(Heap* heap, int n, PretenureFlag pretenure); +template Handle<SeededNumberDictionary> +Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: + AtPut(Handle<SeededNumberDictionary>, uint32_t, Handle<Object>); -template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::AtPut( - uint32_t, Object*); +template Handle<UnseededNumberDictionary> +Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>:: + AtPut(Handle<UnseededNumberDictionary>, uint32_t, Handle<Object>); -template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>:: - AtPut(uint32_t, Object*); - -template Object* Dictionary<SeededNumberDictionaryShape, uint32_t>:: +template Object* +Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: SlowReverseLookup(Object* value); -template Object* Dictionary<UnseededNumberDictionaryShape, uint32_t>:: +template Object* +Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>:: SlowReverseLookup(Object* value); -template Object* Dictionary<NameDictionaryShape, Name*>::SlowReverseLookup( - Object*); - -template void Dictionary<SeededNumberDictionaryShape, uint32_t>::CopyKeysTo( - FixedArray*, - PropertyAttributes, - Dictionary<SeededNumberDictionaryShape, uint32_t>::SortMode); - -template Object* Dictionary<NameDictionaryShape, Name*>::DeleteProperty( - int, JSObject::DeleteMode); - -template 
Object* Dictionary<SeededNumberDictionaryShape, uint32_t>:: - DeleteProperty(int, JSObject::DeleteMode); - -template MaybeObject* Dictionary<NameDictionaryShape, Name*>::Shrink(Name* n); - -template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::Shrink( - uint32_t); +template Object* +Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >:: + SlowReverseLookup(Object* value); -template void Dictionary<NameDictionaryShape, Name*>::CopyKeysTo( - FixedArray*, - int, - PropertyAttributes, - Dictionary<NameDictionaryShape, Name*>::SortMode); +template void +Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: + CopyKeysTo( + FixedArray*, + PropertyAttributes, + Dictionary<SeededNumberDictionary, + SeededNumberDictionaryShape, + uint32_t>::SortMode); + +template Handle<Object> +Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::DeleteProperty( + Handle<NameDictionary>, int, JSObject::DeleteMode); + +template Handle<Object> +Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: + DeleteProperty(Handle<SeededNumberDictionary>, int, JSObject::DeleteMode); + +template Handle<NameDictionary> +HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >:: + New(Isolate*, int, MinimumCapacity, PretenureFlag); + +template Handle<NameDictionary> +HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >:: + Shrink(Handle<NameDictionary>, Handle<Name>); + +template Handle<SeededNumberDictionary> +HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: + Shrink(Handle<SeededNumberDictionary>, uint32_t); + +template void Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >:: + CopyKeysTo( + FixedArray*, + int, + PropertyAttributes, + Dictionary< + NameDictionary, NameDictionaryShape, Handle<Name> >::SortMode); template int -Dictionary<NameDictionaryShape, Name*>::NumberOfElementsFilterAttributes( - PropertyAttributes); +Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >:: + NumberOfElementsFilterAttributes(PropertyAttributes); -template MaybeObject* Dictionary<NameDictionaryShape, Name*>::Add( - Name*, Object*, PropertyDetails); +template Handle<NameDictionary> +Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::Add( + Handle<NameDictionary>, Handle<Name>, Handle<Object>, PropertyDetails); -template MaybeObject* -Dictionary<NameDictionaryShape, Name*>::GenerateNewEnumerationIndices(); +template void +Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >:: + GenerateNewEnumerationIndices(Handle<NameDictionary>); template int -Dictionary<SeededNumberDictionaryShape, uint32_t>:: +Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: NumberOfElementsFilterAttributes(PropertyAttributes); -template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::Add( - uint32_t, Object*, PropertyDetails); - -template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::Add( - uint32_t, Object*, PropertyDetails); +template Handle<SeededNumberDictionary> +Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: + Add(Handle<SeededNumberDictionary>, + uint32_t, + Handle<Object>, + PropertyDetails); -template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>:: - EnsureCapacity(int, uint32_t); +template Handle<UnseededNumberDictionary> +Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>:: + Add(Handle<UnseededNumberDictionary>, + uint32_t, + Handle<Object>, + PropertyDetails); 
-template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>:: - EnsureCapacity(int, uint32_t); +template Handle<SeededNumberDictionary> +Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: + EnsureCapacity(Handle<SeededNumberDictionary>, int, uint32_t); -template MaybeObject* Dictionary<NameDictionaryShape, Name*>:: - EnsureCapacity(int, Name*); +template Handle<UnseededNumberDictionary> +Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>:: + EnsureCapacity(Handle<UnseededNumberDictionary>, int, uint32_t); -template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>:: - AddEntry(uint32_t, Object*, PropertyDetails, uint32_t); - -template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>:: - AddEntry(uint32_t, Object*, PropertyDetails, uint32_t); - -template MaybeObject* Dictionary<NameDictionaryShape, Name*>::AddEntry( - Name*, Object*, PropertyDetails, uint32_t); +template Handle<NameDictionary> +Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >:: + EnsureCapacity(Handle<NameDictionary>, int, Handle<Name>); template -int Dictionary<SeededNumberDictionaryShape, uint32_t>::NumberOfEnumElements(); +int Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: + NumberOfEnumElements(); template -int Dictionary<NameDictionaryShape, Name*>::NumberOfEnumElements(); +int Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >:: + NumberOfEnumElements(); template -int HashTable<SeededNumberDictionaryShape, uint32_t>::FindEntry(uint32_t); +int HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: + FindEntry(uint32_t); Handle<Object> JSObject::PrepareSlowElementsForSort( Handle<JSObject> object, uint32_t limit) { - CALL_HEAP_FUNCTION(object->GetIsolate(), - object->PrepareSlowElementsForSort(limit), - Object); -} - - -// Collates undefined and unexisting elements below limit from position -// zero of the elements. The object stays in Dictionary mode. -MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) { - ASSERT(HasDictionaryElements()); + ASSERT(object->HasDictionaryElements()); + Isolate* isolate = object->GetIsolate(); // Must stay in dictionary mode, either because of requires_slow_elements, // or because we are not going to sort (and therefore compact) all of the // elements. - SeededNumberDictionary* dict = element_dictionary(); - HeapNumber* result_double = NULL; - if (limit > static_cast<uint32_t>(Smi::kMaxValue)) { - // Allocate space for result before we start mutating the object. - Object* new_double; - { MaybeObject* maybe_new_double = GetHeap()->AllocateHeapNumber(0.0); - if (!maybe_new_double->ToObject(&new_double)) return maybe_new_double; - } - result_double = HeapNumber::cast(new_double); - } - - Object* obj; - { MaybeObject* maybe_obj = - SeededNumberDictionary::Allocate(GetHeap(), dict->NumberOfElements()); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - SeededNumberDictionary* new_dict = SeededNumberDictionary::cast(obj); - - DisallowHeapAllocation no_alloc; + Handle<SeededNumberDictionary> dict(object->element_dictionary(), isolate); + Handle<SeededNumberDictionary> new_dict = + SeededNumberDictionary::New(isolate, dict->NumberOfElements()); uint32_t pos = 0; uint32_t undefs = 0; int capacity = dict->Capacity(); + Handle<Smi> bailout(Smi::FromInt(-1), isolate); + // Entry to the new dictionary does not cause it to grow, as we have + // allocated one that is large enough for all entries. 
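The explicit instantiations above show the mechanical pattern repeated throughout this file: methods that used to return MaybeObject* (forcing every caller to check for an allocation failure and retry) now take and return Handle<T>, so allocation and GC bookkeeping stay inside the factory. The toy below only illustrates the indirection idea behind a handle, with invented names (ToyHandleScope, NewObj); it is not V8's HandleScope.

#include <cstdio>
#include <deque>

struct Obj { int value; };

// A scope owns stable slots that point at heap objects. A moving collector
// could rewrite the slots in place; user code holds handles (pointers to a
// slot), so it never caches a raw object address across an allocation.
class ToyHandleScope {
 public:
  Obj** CreateSlot(Obj* obj) {
    slots_.push_back(obj);
    return &slots_.back();  // std::deque keeps references stable on push_back
  }
 private:
  std::deque<Obj*> slots_;
};

template <typename T>
class ToyHandle {
 public:
  explicit ToyHandle(T** slot) : slot_(slot) {}
  T* operator->() const { return *slot_; }
 private:
  T** slot_;
};

// Factory-style allocation: returns a handle rather than a raw pointer, so a
// collection triggered here could move the object and fix up the slot.
ToyHandle<Obj> NewObj(ToyHandleScope* scope, int value) {
  return ToyHandle<Obj>(scope->CreateSlot(new Obj{value}));  // toy: never freed
}

int main() {
  ToyHandleScope scope;
  ToyHandle<Obj> a = NewObj(&scope, 42);
  ToyHandle<Obj> b = NewObj(&scope, 7);  // could "move" a in a real VM
  std::printf("%d %d\n", a->value, b->value);
}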
+ DisallowHeapAllocation no_gc; for (int i = 0; i < capacity; i++) { Object* k = dict->KeyAt(i); - if (dict->IsKey(k)) { - ASSERT(k->IsNumber()); - ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0); - ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0); - ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32); - Object* value = dict->ValueAt(i); - PropertyDetails details = dict->DetailsAt(i); - if (details.type() == CALLBACKS || details.IsReadOnly()) { - // Bail out and do the sorting of undefineds and array holes in JS. - // Also bail out if the element is not supposed to be moved. - return Smi::FromInt(-1); - } - uint32_t key = NumberToUint32(k); - // In the following we assert that adding the entry to the new dictionary - // does not cause GC. This is the case because we made sure to allocate - // the dictionary big enough above, so it need not grow. - if (key < limit) { - if (value->IsUndefined()) { - undefs++; - } else { - if (pos > static_cast<uint32_t>(Smi::kMaxValue)) { - // Adding an entry with the key beyond smi-range requires - // allocation. Bailout. - return Smi::FromInt(-1); - } - new_dict->AddNumberEntry(pos, value, details)->ToObjectUnchecked(); - pos++; - } + if (!dict->IsKey(k)) continue; + + ASSERT(k->IsNumber()); + ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0); + ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0); + ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32); + + HandleScope scope(isolate); + Handle<Object> value(dict->ValueAt(i), isolate); + PropertyDetails details = dict->DetailsAt(i); + if (details.type() == CALLBACKS || details.IsReadOnly()) { + // Bail out and do the sorting of undefineds and array holes in JS. + // Also bail out if the element is not supposed to be moved. + return bailout; + } + + uint32_t key = NumberToUint32(k); + if (key < limit) { + if (value->IsUndefined()) { + undefs++; + } else if (pos > static_cast<uint32_t>(Smi::kMaxValue)) { + // Adding an entry with the key beyond smi-range requires + // allocation. Bailout. + return bailout; } else { - if (key > static_cast<uint32_t>(Smi::kMaxValue)) { - // Adding an entry with the key beyond smi-range requires - // allocation. Bailout. - return Smi::FromInt(-1); - } - new_dict->AddNumberEntry(key, value, details)->ToObjectUnchecked(); + Handle<Object> result = SeededNumberDictionary::AddNumberEntry( + new_dict, pos, value, details); + ASSERT(result.is_identical_to(new_dict)); + USE(result); + pos++; } + } else if (key > static_cast<uint32_t>(Smi::kMaxValue)) { + // Adding an entry with the key beyond smi-range requires + // allocation. Bailout. + return bailout; + } else { + Handle<Object> result = SeededNumberDictionary::AddNumberEntry( + new_dict, key, value, details); + ASSERT(result.is_identical_to(new_dict)); + USE(result); } } uint32_t result = pos; PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0); - Heap* heap = GetHeap(); while (undefs > 0) { if (pos > static_cast<uint32_t>(Smi::kMaxValue)) { // Adding an entry with the key beyond smi-range requires // allocation. Bailout. 
- return Smi::FromInt(-1); + return bailout; } - new_dict->AddNumberEntry(pos, heap->undefined_value(), no_details)-> - ToObjectUnchecked(); + HandleScope scope(isolate); + Handle<Object> result = SeededNumberDictionary::AddNumberEntry( + new_dict, pos, isolate->factory()->undefined_value(), no_details); + ASSERT(result.is_identical_to(new_dict)); + USE(result); pos++; undefs--; } - set_elements(new_dict); - - if (result <= static_cast<uint32_t>(Smi::kMaxValue)) { - return Smi::FromInt(static_cast<int>(result)); - } + object->set_elements(*new_dict); - ASSERT_NE(NULL, result_double); - result_double->set_value(static_cast<double>(result)); - return result_double; + AllowHeapAllocation allocate_return_value; + return isolate->factory()->NewNumberFromUint(result); } @@ -14365,9 +15069,9 @@ Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object, Handle<FixedArray> fast_elements = isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure); dict->CopyValuesTo(*fast_elements); - object->ValidateElements(); + JSObject::ValidateElements(object); - object->set_map_and_elements(*new_map, *fast_elements); + JSObject::SetMapAndElements(object, new_map, fast_elements); } else if (object->HasExternalArrayElements() || object->HasFixedTypedArrayElements()) { // Typed arrays cannot have holes or undefined elements. @@ -14502,11 +15206,14 @@ size_t JSTypedArray::element_size() { } -Object* ExternalUint8ClampedArray::SetValue(uint32_t index, Object* value) { +Handle<Object> ExternalUint8ClampedArray::SetValue( + Handle<ExternalUint8ClampedArray> array, + uint32_t index, + Handle<Object> value) { uint8_t clamped_value = 0; - if (index < static_cast<uint32_t>(length())) { + if (index < static_cast<uint32_t>(array->length())) { if (value->IsSmi()) { - int int_value = Smi::cast(value)->value(); + int int_value = Handle<Smi>::cast(value)->value(); if (int_value < 0) { clamped_value = 0; } else if (int_value > 255) { @@ -14515,7 +15222,7 @@ Object* ExternalUint8ClampedArray::SetValue(uint32_t index, Object* value) { clamped_value = static_cast<uint8_t>(int_value); } } else if (value->IsHeapNumber()) { - double double_value = HeapNumber::cast(value)->value(); + double double_value = Handle<HeapNumber>::cast(value)->value(); if (!(double_value > 0)) { // NaN and less than zero clamp to zero. clamped_value = 0; @@ -14531,32 +15238,25 @@ Object* ExternalUint8ClampedArray::SetValue(uint32_t index, Object* value) { // converted to a number type further up in the call chain. 
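The handlified JSObject::PrepareSlowElementsForSort above keeps the original algorithm: for keys below the sort limit, defined values are packed down to the lowest indices, undefined values are counted and appended right after them, and keys at or beyond the limit are copied through unchanged. The sketch below reproduces just that collation over a plain std::map (C++17), leaving out the bail-out cases for accessor/read-only properties and keys beyond the Smi range; the names are illustrative.

#include <cstdint>
#include <cstdio>
#include <map>
#include <optional>

// nullopt stands in for an undefined element / hole.
using SparseElements = std::map<uint32_t, std::optional<int>>;

// Returns the number of defined elements below limit, i.e. the range the
// sort actually has to order once the collation is done.
uint32_t CollateForSort(const SparseElements& in, uint32_t limit,
                        SparseElements* out) {
  uint32_t pos = 0, undefs = 0;
  for (const auto& [key, value] : in) {
    if (key >= limit) {
      (*out)[key] = value;   // out of range: keep the original key
    } else if (!value.has_value()) {
      undefs++;              // count undefineds, append them after the values
    } else {
      (*out)[pos++] = value; // pack defined values to the front
    }
  }
  uint32_t result = pos;
  while (undefs-- > 0) (*out)[pos++] = std::nullopt;
  return result;
}

int main() {
  SparseElements d{{3, 7}, {10, std::nullopt}, {50, 1}, {4000, 9}};
  SparseElements packed;
  uint32_t n = CollateForSort(d, /*limit=*/1000, &packed);
  std::printf("%u defined elements to sort\n", n);  // prints 2
  for (const auto& [k, v] : packed)
    std::printf("  [%u] = %s\n", k, v ? "value" : "undefined");
}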
ASSERT(value->IsUndefined()); } - set(index, clamped_value); + array->set(index, clamped_value); } - return Smi::FromInt(clamped_value); + return handle(Smi::FromInt(clamped_value), array->GetIsolate()); } -Handle<Object> ExternalUint8ClampedArray::SetValue( - Handle<ExternalUint8ClampedArray> array, +template<typename ExternalArrayClass, typename ValueType> +static Handle<Object> ExternalArrayIntSetter( + Isolate* isolate, + Handle<ExternalArrayClass> receiver, uint32_t index, Handle<Object> value) { - return Handle<Object>(array->SetValue(index, *value), array->GetIsolate()); -} - - -template<typename ExternalArrayClass, typename ValueType> -static MaybeObject* ExternalArrayIntSetter(Heap* heap, - ExternalArrayClass* receiver, - uint32_t index, - Object* value) { ValueType cast_value = 0; if (index < static_cast<uint32_t>(receiver->length())) { if (value->IsSmi()) { - int int_value = Smi::cast(value)->value(); + int int_value = Handle<Smi>::cast(value)->value(); cast_value = static_cast<ValueType>(int_value); } else if (value->IsHeapNumber()) { - double double_value = HeapNumber::cast(value)->value(); + double double_value = Handle<HeapNumber>::cast(value)->value(); cast_value = static_cast<ValueType>(DoubleToInt32(double_value)); } else { // Clamp undefined to zero (default). All other types have been @@ -14565,88 +15265,47 @@ static MaybeObject* ExternalArrayIntSetter(Heap* heap, } receiver->set(index, cast_value); } - return heap->NumberFromInt32(cast_value); + return isolate->factory()->NewNumberFromInt(cast_value); } Handle<Object> ExternalInt8Array::SetValue(Handle<ExternalInt8Array> array, uint32_t index, Handle<Object> value) { - CALL_HEAP_FUNCTION(array->GetIsolate(), - array->SetValue(index, *value), - Object); -} - - -MaybeObject* ExternalInt8Array::SetValue(uint32_t index, Object* value) { - return ExternalArrayIntSetter<ExternalInt8Array, int8_t> - (GetHeap(), this, index, value); + return ExternalArrayIntSetter<ExternalInt8Array, int8_t>( + array->GetIsolate(), array, index, value); } -Handle<Object> ExternalUint8Array::SetValue( - Handle<ExternalUint8Array> array, - uint32_t index, - Handle<Object> value) { - CALL_HEAP_FUNCTION(array->GetIsolate(), - array->SetValue(index, *value), - Object); +Handle<Object> ExternalUint8Array::SetValue(Handle<ExternalUint8Array> array, + uint32_t index, + Handle<Object> value) { + return ExternalArrayIntSetter<ExternalUint8Array, uint8_t>( + array->GetIsolate(), array, index, value); } -MaybeObject* ExternalUint8Array::SetValue(uint32_t index, - Object* value) { - return ExternalArrayIntSetter<ExternalUint8Array, uint8_t> - (GetHeap(), this, index, value); +Handle<Object> ExternalInt16Array::SetValue(Handle<ExternalInt16Array> array, + uint32_t index, + Handle<Object> value) { + return ExternalArrayIntSetter<ExternalInt16Array, int16_t>( + array->GetIsolate(), array, index, value); } -Handle<Object> ExternalInt16Array::SetValue( - Handle<ExternalInt16Array> array, - uint32_t index, - Handle<Object> value) { - CALL_HEAP_FUNCTION(array->GetIsolate(), - array->SetValue(index, *value), - Object); -} - - -MaybeObject* ExternalInt16Array::SetValue(uint32_t index, - Object* value) { - return ExternalArrayIntSetter<ExternalInt16Array, int16_t> - (GetHeap(), this, index, value); -} - - -Handle<Object> ExternalUint16Array::SetValue( - Handle<ExternalUint16Array> array, - uint32_t index, - Handle<Object> value) { - CALL_HEAP_FUNCTION(array->GetIsolate(), - array->SetValue(index, *value), - Object); -} - - -MaybeObject* 
ExternalUint16Array::SetValue(uint32_t index, - Object* value) { - return ExternalArrayIntSetter<ExternalUint16Array, uint16_t> - (GetHeap(), this, index, value); +Handle<Object> ExternalUint16Array::SetValue(Handle<ExternalUint16Array> array, + uint32_t index, + Handle<Object> value) { + return ExternalArrayIntSetter<ExternalUint16Array, uint16_t>( + array->GetIsolate(), array, index, value); } Handle<Object> ExternalInt32Array::SetValue(Handle<ExternalInt32Array> array, - uint32_t index, - Handle<Object> value) { - CALL_HEAP_FUNCTION(array->GetIsolate(), - array->SetValue(index, *value), - Object); -} - - -MaybeObject* ExternalInt32Array::SetValue(uint32_t index, Object* value) { - return ExternalArrayIntSetter<ExternalInt32Array, int32_t> - (GetHeap(), this, index, value); + uint32_t index, + Handle<Object> value) { + return ExternalArrayIntSetter<ExternalInt32Array, int32_t>( + array->GetIsolate(), array, index, value); } @@ -14654,30 +15313,22 @@ Handle<Object> ExternalUint32Array::SetValue( Handle<ExternalUint32Array> array, uint32_t index, Handle<Object> value) { - CALL_HEAP_FUNCTION(array->GetIsolate(), - array->SetValue(index, *value), - Object); -} - - -MaybeObject* ExternalUint32Array::SetValue(uint32_t index, Object* value) { uint32_t cast_value = 0; - Heap* heap = GetHeap(); - if (index < static_cast<uint32_t>(length())) { + if (index < static_cast<uint32_t>(array->length())) { if (value->IsSmi()) { - int int_value = Smi::cast(value)->value(); + int int_value = Handle<Smi>::cast(value)->value(); cast_value = static_cast<uint32_t>(int_value); } else if (value->IsHeapNumber()) { - double double_value = HeapNumber::cast(value)->value(); + double double_value = Handle<HeapNumber>::cast(value)->value(); cast_value = static_cast<uint32_t>(DoubleToUint32(double_value)); } else { // Clamp undefined to zero (default). All other types have been // converted to a number type further up in the call chain. ASSERT(value->IsUndefined()); } - set(index, cast_value); + array->set(index, cast_value); } - return heap->NumberFromUint32(cast_value); + return array->GetIsolate()->factory()->NewNumberFromUint(cast_value); } @@ -14685,30 +15336,22 @@ Handle<Object> ExternalFloat32Array::SetValue( Handle<ExternalFloat32Array> array, uint32_t index, Handle<Object> value) { - CALL_HEAP_FUNCTION(array->GetIsolate(), - array->SetValue(index, *value), - Object); -} - - -MaybeObject* ExternalFloat32Array::SetValue(uint32_t index, Object* value) { float cast_value = static_cast<float>(OS::nan_value()); - Heap* heap = GetHeap(); - if (index < static_cast<uint32_t>(length())) { + if (index < static_cast<uint32_t>(array->length())) { if (value->IsSmi()) { - int int_value = Smi::cast(value)->value(); + int int_value = Handle<Smi>::cast(value)->value(); cast_value = static_cast<float>(int_value); } else if (value->IsHeapNumber()) { - double double_value = HeapNumber::cast(value)->value(); + double double_value = Handle<HeapNumber>::cast(value)->value(); cast_value = static_cast<float>(double_value); } else { // Clamp undefined to NaN (default). All other types have been // converted to a number type further up in the call chain. 
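The external-array setters above all reduce to a store-time conversion of a JavaScript number: Uint8ClampedArray clamps into [0, 255], the integer arrays truncate and wrap, and the float arrays narrow. A small standalone sketch of two of those conversions follows; note that the hunk above elides part of the clamped rounding branch, so the sketch assumes the usual ToUint8Clamp behaviour (round half to even) for non-integral values, and it assumes two's-complement wrapping for the int32 case.

#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstdio>

// Uint8ClampedArray-style store: NaN and non-positive values clamp to 0,
// values of 255 or more clamp to 255, everything else rounds to the nearest
// integer (ties to even under the default rounding mode).
uint8_t ClampToUint8(double value) {
  if (!(value > 0)) return 0;   // NaN and non-positive
  if (value >= 255) return 255;
  return static_cast<uint8_t>(std::nearbyint(value));
}

// Int32Array-style store (JS ToInt32): truncate toward zero, wrap modulo
// 2^32, and map NaN/Infinity to 0.
int32_t ToInt32(double value) {
  if (!std::isfinite(value)) return 0;
  double wrapped = std::fmod(std::trunc(value), 4294967296.0);  // 2^32
  if (wrapped < 0) wrapped += 4294967296.0;
  return static_cast<int32_t>(static_cast<uint32_t>(wrapped));
}

int main() {
  assert(ClampToUint8(-3.0) == 0);
  assert(ClampToUint8(1000.0) == 255);
  assert(ClampToUint8(2.5) == 2);            // ties round to even
  assert(ToInt32(4294967296.0 + 5) == 5);    // wraps modulo 2^32
  assert(ToInt32(-1.0) == -1);
  std::puts("conversion checks passed");
}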
ASSERT(value->IsUndefined()); } - set(index, cast_value); + array->set(index, cast_value); } - return heap->AllocateHeapNumber(cast_value); + return array->GetIsolate()->factory()->NewNumber(cast_value); } @@ -14716,29 +15359,18 @@ Handle<Object> ExternalFloat64Array::SetValue( Handle<ExternalFloat64Array> array, uint32_t index, Handle<Object> value) { - CALL_HEAP_FUNCTION(array->GetIsolate(), - array->SetValue(index, *value), - Object); -} - - -MaybeObject* ExternalFloat64Array::SetValue(uint32_t index, Object* value) { double double_value = OS::nan_value(); - Heap* heap = GetHeap(); - if (index < static_cast<uint32_t>(length())) { - if (value->IsSmi()) { - int int_value = Smi::cast(value)->value(); - double_value = static_cast<double>(int_value); - } else if (value->IsHeapNumber()) { - double_value = HeapNumber::cast(value)->value(); + if (index < static_cast<uint32_t>(array->length())) { + if (value->IsNumber()) { + double_value = value->Number(); } else { // Clamp undefined to NaN (default). All other types have been // converted to a number type further up in the call chain. ASSERT(value->IsUndefined()); } - set(index, double_value); + array->set(index, double_value); } - return heap->AllocateHeapNumber(double_value); + return array->GetIsolate()->factory()->NewNumber(double_value); } @@ -14753,14 +15385,14 @@ Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell( Handle<JSGlobalObject> global, Handle<Name> name) { ASSERT(!global->HasFastProperties()); - int entry = global->property_dictionary()->FindEntry(*name); + int entry = global->property_dictionary()->FindEntry(name); if (entry == NameDictionary::kNotFound) { Isolate* isolate = global->GetIsolate(); Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell( isolate->factory()->the_hole_value()); PropertyDetails details(NONE, NORMAL, 0); details = details.AsDeleted(); - Handle<NameDictionary> dictionary = NameDictionaryAdd( + Handle<NameDictionary> dictionary = NameDictionary::Add( handle(global->property_dictionary()), name, cell, details); global->set_properties(*dictionary); return cell; @@ -14772,12 +15404,6 @@ Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell( } -MaybeObject* StringTable::LookupString(String* string, Object** s) { - InternalizedStringKey key(string); - return LookupKey(&key, s); -} - - // This class is used for looking up two character strings in the string table. // If we don't have a hit we don't want to waste much time so we unroll the // string hash calculation loop here for speed. Doesn't work if the two @@ -14814,7 +15440,7 @@ class TwoCharHashTableKey : public HashTableKey { #endif } - bool IsMatch(Object* o) { + bool IsMatch(Object* o) V8_OVERRIDE { if (!o->IsString()) return false; String* other = String::cast(o); if (other->length() != 2) return false; @@ -14822,17 +15448,17 @@ class TwoCharHashTableKey : public HashTableKey { return other->Get(1) == c2_; } - uint32_t Hash() { return hash_; } - uint32_t HashForObject(Object* key) { + uint32_t Hash() V8_OVERRIDE { return hash_; } + uint32_t HashForObject(Object* key) V8_OVERRIDE { if (!key->IsString()) return 0; return String::cast(key)->Hash(); } - Object* AsObject(Heap* heap) { + Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { // The TwoCharHashTableKey is only used for looking in the string // table, not for adding to it. 
UNREACHABLE(); - return NULL; + return MaybeHandle<Object>().ToHandleChecked(); } private: @@ -14842,179 +15468,171 @@ class TwoCharHashTableKey : public HashTableKey { }; -bool StringTable::LookupStringIfExists(String* string, String** result) { +MaybeHandle<String> StringTable::InternalizeStringIfExists( + Isolate* isolate, + Handle<String> string) { + if (string->IsInternalizedString()) { + return string; + } + return LookupStringIfExists(isolate, string); +} + + +MaybeHandle<String> StringTable::LookupStringIfExists( + Isolate* isolate, + Handle<String> string) { + Handle<StringTable> string_table = isolate->factory()->string_table(); InternalizedStringKey key(string); - int entry = FindEntry(&key); + int entry = string_table->FindEntry(&key); if (entry == kNotFound) { - return false; + return MaybeHandle<String>(); } else { - *result = String::cast(KeyAt(entry)); + Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate); ASSERT(StringShape(*result).IsInternalized()); - return true; + return result; } } -bool StringTable::LookupTwoCharsStringIfExists(uint16_t c1, - uint16_t c2, - String** result) { - TwoCharHashTableKey key(c1, c2, GetHeap()->HashSeed()); - int entry = FindEntry(&key); +MaybeHandle<String> StringTable::LookupTwoCharsStringIfExists( + Isolate* isolate, + uint16_t c1, + uint16_t c2) { + Handle<StringTable> string_table = isolate->factory()->string_table(); + TwoCharHashTableKey key(c1, c2, isolate->heap()->HashSeed()); + int entry = string_table->FindEntry(&key); if (entry == kNotFound) { - return false; + return MaybeHandle<String>(); } else { - *result = String::cast(KeyAt(entry)); + Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate); ASSERT(StringShape(*result).IsInternalized()); - return true; + return result; } } -MaybeObject* StringTable::LookupKey(HashTableKey* key, Object** s) { - int entry = FindEntry(key); +Handle<String> StringTable::LookupString(Isolate* isolate, + Handle<String> string) { + InternalizedStringKey key(string); + return LookupKey(isolate, &key); +} + + +Handle<String> StringTable::LookupKey(Isolate* isolate, HashTableKey* key) { + Handle<StringTable> table = isolate->factory()->string_table(); + int entry = table->FindEntry(key); // String already in table. if (entry != kNotFound) { - *s = KeyAt(entry); - return this; + return handle(String::cast(table->KeyAt(entry)), isolate); } // Adding new string. Grow table if needed. - Object* obj; - { MaybeObject* maybe_obj = EnsureCapacity(1, key); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } + table = StringTable::EnsureCapacity(table, 1, key); // Create string object. - Object* string; - { MaybeObject* maybe_string = key->AsObject(GetHeap()); - if (!maybe_string->ToObject(&string)) return maybe_string; - } - - // If the string table grew as part of EnsureCapacity, obj is not - // the current string table and therefore we cannot use - // StringTable::cast here. - StringTable* table = reinterpret_cast<StringTable*>(obj); + Handle<Object> string = key->AsHandle(isolate); + // There must be no attempts to internalize strings that could throw + // InvalidStringLength error. + CHECK(!string.is_null()); // Add the new string and return it along with the string table. 
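StringTable::LookupString/LookupKey above (the insertion tail continues just below) is an intern operation: look the string up in a process-wide table and, if it is absent, grow the table if needed, add it, and hand back the single canonical copy so later identity comparisons are cheap. A minimal sketch of the same idea over std::unordered_set follows; it is illustrative only, not V8's string table.

#include <cassert>
#include <cstdio>
#include <string>
#include <unordered_set>

// Intern a string: return a reference to the one canonical copy stored in
// the table, inserting it first if it was not there. References into an
// unordered_set stay valid across later insertions, so callers may keep them.
const std::string& Intern(std::unordered_set<std::string>& table,
                          const std::string& s) {
  return *table.insert(s).first;  // insert() is a no-op if s already exists
}

int main() {
  std::unordered_set<std::string> table;
  const std::string& a = Intern(table, "hello");
  const std::string& b = Intern(table, std::string("hel") + "lo");
  assert(&a == &b);  // one canonical copy, comparable by address
  std::printf("interned: %s\n", a.c_str());
}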
entry = table->FindInsertionEntry(key->Hash()); - table->set(EntryToIndex(entry), string); + table->set(EntryToIndex(entry), *string); table->ElementAdded(); - *s = string; - return table; + + isolate->factory()->set_string_table(table); + return Handle<String>::cast(string); } -Object* CompilationCacheTable::Lookup(String* src, Context* context) { - SharedFunctionInfo* shared = context->closure()->shared(); - StringSharedKey key(src, - shared, - FLAG_use_strict ? STRICT : SLOPPY, +Handle<Object> CompilationCacheTable::Lookup(Handle<String> src, + Handle<Context> context) { + Isolate* isolate = GetIsolate(); + Handle<SharedFunctionInfo> shared(context->closure()->shared()); + StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY, RelocInfo::kNoPosition); int entry = FindEntry(&key); - if (entry == kNotFound) return GetHeap()->undefined_value(); - return get(EntryToIndex(entry) + 1); + if (entry == kNotFound) return isolate->factory()->undefined_value(); + return Handle<Object>(get(EntryToIndex(entry) + 1), isolate); } -Object* CompilationCacheTable::LookupEval(String* src, - Context* context, - StrictMode strict_mode, - int scope_position) { - StringSharedKey key(src, - context->closure()->shared(), - strict_mode, - scope_position); +Handle<Object> CompilationCacheTable::LookupEval(Handle<String> src, + Handle<Context> context, + StrictMode strict_mode, + int scope_position) { + Isolate* isolate = GetIsolate(); + Handle<SharedFunctionInfo> shared(context->closure()->shared()); + StringSharedKey key(src, shared, strict_mode, scope_position); int entry = FindEntry(&key); - if (entry == kNotFound) return GetHeap()->undefined_value(); - return get(EntryToIndex(entry) + 1); + if (entry == kNotFound) return isolate->factory()->undefined_value(); + return Handle<Object>(get(EntryToIndex(entry) + 1), isolate); } -Object* CompilationCacheTable::LookupRegExp(String* src, - JSRegExp::Flags flags) { +Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src, + JSRegExp::Flags flags) { + Isolate* isolate = GetIsolate(); + DisallowHeapAllocation no_allocation; RegExpKey key(src, flags); int entry = FindEntry(&key); - if (entry == kNotFound) return GetHeap()->undefined_value(); - return get(EntryToIndex(entry) + 1); + if (entry == kNotFound) return isolate->factory()->undefined_value(); + return Handle<Object>(get(EntryToIndex(entry) + 1), isolate); } -MaybeObject* CompilationCacheTable::Put(String* src, - Context* context, - Object* value) { - SharedFunctionInfo* shared = context->closure()->shared(); - StringSharedKey key(src, - shared, - FLAG_use_strict ? STRICT : SLOPPY, +Handle<CompilationCacheTable> CompilationCacheTable::Put( + Handle<CompilationCacheTable> cache, Handle<String> src, + Handle<Context> context, Handle<Object> value) { + Isolate* isolate = cache->GetIsolate(); + Handle<SharedFunctionInfo> shared(context->closure()->shared()); + StringSharedKey key(src, shared, FLAG_use_strict ? 
STRICT : SLOPPY, RelocInfo::kNoPosition); - CompilationCacheTable* cache; - MaybeObject* maybe_cache = EnsureCapacity(1, &key); - if (!maybe_cache->To(&cache)) return maybe_cache; - - Object* k; - MaybeObject* maybe_k = key.AsObject(GetHeap()); - if (!maybe_k->To(&k)) return maybe_k; - + cache = EnsureCapacity(cache, 1, &key); + Handle<Object> k = key.AsHandle(isolate); int entry = cache->FindInsertionEntry(key.Hash()); - cache->set(EntryToIndex(entry), k); - cache->set(EntryToIndex(entry) + 1, value); + cache->set(EntryToIndex(entry), *k); + cache->set(EntryToIndex(entry) + 1, *value); cache->ElementAdded(); return cache; } -MaybeObject* CompilationCacheTable::PutEval(String* src, - Context* context, - SharedFunctionInfo* value, - int scope_position) { - StringSharedKey key(src, - context->closure()->shared(), - value->strict_mode(), - scope_position); - Object* obj; - { MaybeObject* maybe_obj = EnsureCapacity(1, &key); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - - CompilationCacheTable* cache = - reinterpret_cast<CompilationCacheTable*>(obj); +Handle<CompilationCacheTable> CompilationCacheTable::PutEval( + Handle<CompilationCacheTable> cache, Handle<String> src, + Handle<Context> context, Handle<SharedFunctionInfo> value, + int scope_position) { + Isolate* isolate = cache->GetIsolate(); + Handle<SharedFunctionInfo> shared(context->closure()->shared()); + StringSharedKey key(src, shared, value->strict_mode(), scope_position); + cache = EnsureCapacity(cache, 1, &key); + Handle<Object> k = key.AsHandle(isolate); int entry = cache->FindInsertionEntry(key.Hash()); - - Object* k; - { MaybeObject* maybe_k = key.AsObject(GetHeap()); - if (!maybe_k->ToObject(&k)) return maybe_k; - } - - cache->set(EntryToIndex(entry), k); - cache->set(EntryToIndex(entry) + 1, value); + cache->set(EntryToIndex(entry), *k); + cache->set(EntryToIndex(entry) + 1, *value); cache->ElementAdded(); return cache; } -MaybeObject* CompilationCacheTable::PutRegExp(String* src, - JSRegExp::Flags flags, - FixedArray* value) { +Handle<CompilationCacheTable> CompilationCacheTable::PutRegExp( + Handle<CompilationCacheTable> cache, Handle<String> src, + JSRegExp::Flags flags, Handle<FixedArray> value) { RegExpKey key(src, flags); - Object* obj; - { MaybeObject* maybe_obj = EnsureCapacity(1, &key); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - - CompilationCacheTable* cache = - reinterpret_cast<CompilationCacheTable*>(obj); + cache = EnsureCapacity(cache, 1, &key); int entry = cache->FindInsertionEntry(key.Hash()); // We store the value in the key slot, and compare the search key // to the stored value with a custon IsMatch function during lookups. - cache->set(EntryToIndex(entry), value); - cache->set(EntryToIndex(entry) + 1, value); + cache->set(EntryToIndex(entry), *value); + cache->set(EntryToIndex(entry) + 1, *value); cache->ElementAdded(); return cache; } void CompilationCacheTable::Remove(Object* value) { + DisallowHeapAllocation no_allocation; Object* the_hole_value = GetHeap()->the_hole_value(); for (int entry = 0, size = Capacity(); entry < size; entry++) { int entry_index = EntryToIndex(entry); @@ -15032,9 +15650,9 @@ void CompilationCacheTable::Remove(Object* value) { // StringsKey used for HashTable where key is array of internalized strings. 
class StringsKey : public HashTableKey { public: - explicit StringsKey(FixedArray* strings) : strings_(strings) { } + explicit StringsKey(Handle<FixedArray> strings) : strings_(strings) { } - bool IsMatch(Object* strings) { + bool IsMatch(Object* strings) V8_OVERRIDE { FixedArray* o = FixedArray::cast(strings); int len = strings_->length(); if (o->length() != len) return false; @@ -15044,9 +15662,9 @@ class StringsKey : public HashTableKey { return true; } - uint32_t Hash() { return HashForObject(strings_); } + uint32_t Hash() V8_OVERRIDE { return HashForObject(*strings_); } - uint32_t HashForObject(Object* obj) { + uint32_t HashForObject(Object* obj) V8_OVERRIDE { FixedArray* strings = FixedArray::cast(obj); int len = strings->length(); uint32_t hash = 0; @@ -15056,96 +15674,79 @@ class StringsKey : public HashTableKey { return hash; } - Object* AsObject(Heap* heap) { return strings_; } + Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { return strings_; } private: - FixedArray* strings_; + Handle<FixedArray> strings_; }; Object* MapCache::Lookup(FixedArray* array) { - StringsKey key(array); + DisallowHeapAllocation no_alloc; + StringsKey key(handle(array)); int entry = FindEntry(&key); if (entry == kNotFound) return GetHeap()->undefined_value(); return get(EntryToIndex(entry) + 1); } -MaybeObject* MapCache::Put(FixedArray* array, Map* value) { +Handle<MapCache> MapCache::Put( + Handle<MapCache> map_cache, Handle<FixedArray> array, Handle<Map> value) { StringsKey key(array); - Object* obj; - { MaybeObject* maybe_obj = EnsureCapacity(1, &key); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - MapCache* cache = reinterpret_cast<MapCache*>(obj); - int entry = cache->FindInsertionEntry(key.Hash()); - cache->set(EntryToIndex(entry), array); - cache->set(EntryToIndex(entry) + 1, value); - cache->ElementAdded(); - return cache; + Handle<MapCache> new_cache = EnsureCapacity(map_cache, 1, &key); + int entry = new_cache->FindInsertionEntry(key.Hash()); + new_cache->set(EntryToIndex(entry), *array); + new_cache->set(EntryToIndex(entry) + 1, *value); + new_cache->ElementAdded(); + return new_cache; } -template<typename Shape, typename Key> -MaybeObject* Dictionary<Shape, Key>::Allocate(Heap* heap, - int at_least_space_for, - PretenureFlag pretenure) { - Object* obj; - { MaybeObject* maybe_obj = - HashTable<Shape, Key>::Allocate( - heap, - at_least_space_for, - USE_DEFAULT_MINIMUM_CAPACITY, - pretenure); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } +template<typename Derived, typename Shape, typename Key> +Handle<Derived> Dictionary<Derived, Shape, Key>::New( + Isolate* isolate, + int at_least_space_for, + PretenureFlag pretenure) { + ASSERT(0 <= at_least_space_for); + Handle<Derived> dict = DerivedHashTable::New(isolate, + at_least_space_for, + USE_DEFAULT_MINIMUM_CAPACITY, + pretenure); + // Initialize the next enumeration index. 
- Dictionary<Shape, Key>::cast(obj)-> - SetNextEnumerationIndex(PropertyDetails::kInitialIndex); - return obj; + dict->SetNextEnumerationIndex(PropertyDetails::kInitialIndex); + return dict; } -void NameDictionary::DoGenerateNewEnumerationIndices( - Handle<NameDictionary> dictionary) { - CALL_HEAP_FUNCTION_VOID(dictionary->GetIsolate(), - dictionary->GenerateNewEnumerationIndices()); -} - -template<typename Shape, typename Key> -MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() { - Heap* heap = Dictionary<Shape, Key>::GetHeap(); - int length = HashTable<Shape, Key>::NumberOfElements(); +template<typename Derived, typename Shape, typename Key> +void Dictionary<Derived, Shape, Key>::GenerateNewEnumerationIndices( + Handle<Derived> dictionary) { + Factory* factory = dictionary->GetIsolate()->factory(); + int length = dictionary->NumberOfElements(); // Allocate and initialize iteration order array. - Object* obj; - { MaybeObject* maybe_obj = heap->AllocateFixedArray(length); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - FixedArray* iteration_order = FixedArray::cast(obj); + Handle<FixedArray> iteration_order = factory->NewFixedArray(length); for (int i = 0; i < length; i++) { iteration_order->set(i, Smi::FromInt(i)); } // Allocate array with enumeration order. - { MaybeObject* maybe_obj = heap->AllocateFixedArray(length); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - FixedArray* enumeration_order = FixedArray::cast(obj); + Handle<FixedArray> enumeration_order = factory->NewFixedArray(length); // Fill the enumeration order array with property details. - int capacity = HashTable<Shape, Key>::Capacity(); + int capacity = dictionary->Capacity(); int pos = 0; for (int i = 0; i < capacity; i++) { - if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) { - int index = DetailsAt(i).dictionary_index(); + if (dictionary->IsKey(dictionary->KeyAt(i))) { + int index = dictionary->DetailsAt(i).dictionary_index(); enumeration_order->set(pos++, Smi::FromInt(index)); } } // Sort the arrays wrt. enumeration order. - iteration_order->SortPairs(enumeration_order, enumeration_order->length()); + iteration_order->SortPairs(*enumeration_order, enumeration_order->length()); // Overwrite the enumeration_order with the enumeration indices. for (int i = 0; i < length; i++) { @@ -15155,135 +15756,125 @@ MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() { } // Update the dictionary with new indices. - capacity = HashTable<Shape, Key>::Capacity(); + capacity = dictionary->Capacity(); pos = 0; for (int i = 0; i < capacity; i++) { - if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) { + if (dictionary->IsKey(dictionary->KeyAt(i))) { int enum_index = Smi::cast(enumeration_order->get(pos++))->value(); - PropertyDetails details = DetailsAt(i); + PropertyDetails details = dictionary->DetailsAt(i); PropertyDetails new_details = PropertyDetails( details.attributes(), details.type(), enum_index); - DetailsAtPut(i, new_details); + dictionary->DetailsAtPut(i, new_details); } } // Set the next enumeration index. 
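GenerateNewEnumerationIndices above compacts the per-property enumeration indices when the dictionary runs out of representable ones: entries are ranked by their current index and then reassigned consecutive indices starting from the initial value, which preserves the observable for-in order. A standalone sketch of that reassignment follows, using illustrative names rather than V8's FixedArray machinery.

#include <algorithm>
#include <cstdio>
#include <string>
#include <vector>

// A dictionary-mode property with its current (possibly sparse) index used
// for enumeration order.
struct Property {
  std::string name;
  int enum_index;
};

// Reassign compact indices initial_index, initial_index + 1, ... while
// preserving the existing relative enumeration order; the backing store
// itself is not reordered.
void RegenerateEnumerationIndices(std::vector<Property>* props,
                                  int initial_index) {
  std::vector<size_t> order(props->size());
  for (size_t i = 0; i < order.size(); i++) order[i] = i;
  std::stable_sort(order.begin(), order.end(), [&](size_t a, size_t b) {
    return (*props)[a].enum_index < (*props)[b].enum_index;
  });
  for (size_t rank = 0; rank < order.size(); rank++) {
    (*props)[order[rank]].enum_index = initial_index + static_cast<int>(rank);
  }
}

int main() {
  std::vector<Property> props = {{"b", 900}, {"a", 3}, {"c", 4000}};
  RegenerateEnumerationIndices(&props, /*initial_index=*/1);
  for (const Property& p : props)
    std::printf("%s -> %d\n", p.name.c_str(), p.enum_index);
  // prints: b -> 2, a -> 1, c -> 3 (order preserved, indices compacted)
}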
- SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length); - return this; + dictionary->SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length); } -template<typename Shape, typename Key> -MaybeObject* Dictionary<Shape, Key>::EnsureCapacity(int n, Key key) { + +template<typename Derived, typename Shape, typename Key> +Handle<Derived> Dictionary<Derived, Shape, Key>::EnsureCapacity( + Handle<Derived> dictionary, int n, Key key) { // Check whether there are enough enumeration indices to add n elements. if (Shape::kIsEnumerable && - !PropertyDetails::IsValidIndex(NextEnumerationIndex() + n)) { + !PropertyDetails::IsValidIndex(dictionary->NextEnumerationIndex() + n)) { // If not, we generate new indices for the properties. - Object* result; - { MaybeObject* maybe_result = GenerateNewEnumerationIndices(); - if (!maybe_result->ToObject(&result)) return maybe_result; - } + GenerateNewEnumerationIndices(dictionary); } - return HashTable<Shape, Key>::EnsureCapacity(n, key); + return DerivedHashTable::EnsureCapacity(dictionary, n, key); } -template<typename Shape, typename Key> -Object* Dictionary<Shape, Key>::DeleteProperty(int entry, - JSReceiver::DeleteMode mode) { - Heap* heap = Dictionary<Shape, Key>::GetHeap(); - PropertyDetails details = DetailsAt(entry); +template<typename Derived, typename Shape, typename Key> +Handle<Object> Dictionary<Derived, Shape, Key>::DeleteProperty( + Handle<Derived> dictionary, + int entry, + JSObject::DeleteMode mode) { + Factory* factory = dictionary->GetIsolate()->factory(); + PropertyDetails details = dictionary->DetailsAt(entry); // Ignore attributes if forcing a deletion. if (details.IsDontDelete() && mode != JSReceiver::FORCE_DELETION) { - return heap->false_value(); + return factory->false_value(); } - SetEntry(entry, heap->the_hole_value(), heap->the_hole_value()); - HashTable<Shape, Key>::ElementRemoved(); - return heap->true_value(); -} - -template<typename Shape, typename Key> -MaybeObject* Dictionary<Shape, Key>::Shrink(Key key) { - return HashTable<Shape, Key>::Shrink(key); + dictionary->SetEntry( + entry, factory->the_hole_value(), factory->the_hole_value()); + dictionary->ElementRemoved(); + return factory->true_value(); } -template<typename Shape, typename Key> -MaybeObject* Dictionary<Shape, Key>::AtPut(Key key, Object* value) { - int entry = this->FindEntry(key); +template<typename Derived, typename Shape, typename Key> +Handle<Derived> Dictionary<Derived, Shape, Key>::AtPut( + Handle<Derived> dictionary, Key key, Handle<Object> value) { + int entry = dictionary->FindEntry(key); // If the entry is present set the value; - if (entry != Dictionary<Shape, Key>::kNotFound) { - ValueAtPut(entry, value); - return this; + if (entry != Dictionary::kNotFound) { + dictionary->ValueAtPut(entry, *value); + return dictionary; } // Check whether the dictionary should be extended. 
- Object* obj; - { MaybeObject* maybe_obj = EnsureCapacity(1, key); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - - Object* k; - { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key); - if (!maybe_k->ToObject(&k)) return maybe_k; - } + dictionary = EnsureCapacity(dictionary, 1, key); +#ifdef DEBUG + USE(Shape::AsHandle(dictionary->GetIsolate(), key)); +#endif PropertyDetails details = PropertyDetails(NONE, NORMAL, 0); - return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details, - Dictionary<Shape, Key>::Hash(key)); + AddEntry(dictionary, key, value, details, dictionary->Hash(key)); + return dictionary; } -template<typename Shape, typename Key> -MaybeObject* Dictionary<Shape, Key>::Add(Key key, - Object* value, - PropertyDetails details) { +template<typename Derived, typename Shape, typename Key> +Handle<Derived> Dictionary<Derived, Shape, Key>::Add( + Handle<Derived> dictionary, + Key key, + Handle<Object> value, + PropertyDetails details) { // Valdate key is absent. - SLOW_ASSERT((this->FindEntry(key) == Dictionary<Shape, Key>::kNotFound)); + SLOW_ASSERT((dictionary->FindEntry(key) == Dictionary::kNotFound)); // Check whether the dictionary should be extended. - Object* obj; - { MaybeObject* maybe_obj = EnsureCapacity(1, key); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } + dictionary = EnsureCapacity(dictionary, 1, key); - return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details, - Dictionary<Shape, Key>::Hash(key)); + AddEntry(dictionary, key, value, details, dictionary->Hash(key)); + return dictionary; } // Add a key, value pair to the dictionary. -template<typename Shape, typename Key> -MaybeObject* Dictionary<Shape, Key>::AddEntry(Key key, - Object* value, - PropertyDetails details, - uint32_t hash) { +template<typename Derived, typename Shape, typename Key> +void Dictionary<Derived, Shape, Key>::AddEntry( + Handle<Derived> dictionary, + Key key, + Handle<Object> value, + PropertyDetails details, + uint32_t hash) { // Compute the key object. - Object* k; - { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key); - if (!maybe_k->ToObject(&k)) return maybe_k; - } + Handle<Object> k = Shape::AsHandle(dictionary->GetIsolate(), key); - uint32_t entry = Dictionary<Shape, Key>::FindInsertionEntry(hash); + uint32_t entry = dictionary->FindInsertionEntry(hash); // Insert element at empty or deleted entry if (!details.IsDeleted() && details.dictionary_index() == 0 && Shape::kIsEnumerable) { // Assign an enumeration index to the property and update // SetNextEnumerationIndex. - int index = NextEnumerationIndex(); + int index = dictionary->NextEnumerationIndex(); details = PropertyDetails(details.attributes(), details.type(), index); - SetNextEnumerationIndex(index + 1); + dictionary->SetNextEnumerationIndex(index + 1); } - SetEntry(entry, k, value, details); - ASSERT((Dictionary<Shape, Key>::KeyAt(entry)->IsNumber() || - Dictionary<Shape, Key>::KeyAt(entry)->IsName())); - HashTable<Shape, Key>::ElementAdded(); - return this; + dictionary->SetEntry(entry, k, value, details); + ASSERT((dictionary->KeyAt(entry)->IsNumber() || + dictionary->KeyAt(entry)->IsName())); + dictionary->ElementAdded(); } void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) { + DisallowHeapAllocation no_allocation; // If the dictionary requires slow elements an element has already // been added at a high index. 
if (requires_slow_elements()) return; @@ -15301,105 +15892,86 @@ void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) { } } + Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry( Handle<SeededNumberDictionary> dictionary, uint32_t key, Handle<Object> value, PropertyDetails details) { - CALL_HEAP_FUNCTION(dictionary->GetIsolate(), - dictionary->AddNumberEntry(key, *value, details), - SeededNumberDictionary); + dictionary->UpdateMaxNumberKey(key); + SLOW_ASSERT(dictionary->FindEntry(key) == kNotFound); + return Add(dictionary, key, value, details); } -MaybeObject* SeededNumberDictionary::AddNumberEntry(uint32_t key, - Object* value, - PropertyDetails details) { - UpdateMaxNumberKey(key); - SLOW_ASSERT(this->FindEntry(key) == kNotFound); - return Add(key, value, details); -} - -MaybeObject* UnseededNumberDictionary::AddNumberEntry(uint32_t key, - Object* value) { - SLOW_ASSERT(this->FindEntry(key) == kNotFound); - return Add(key, value, PropertyDetails(NONE, NORMAL, 0)); +Handle<UnseededNumberDictionary> UnseededNumberDictionary::AddNumberEntry( + Handle<UnseededNumberDictionary> dictionary, + uint32_t key, + Handle<Object> value) { + SLOW_ASSERT(dictionary->FindEntry(key) == kNotFound); + return Add(dictionary, key, value, PropertyDetails(NONE, NORMAL, 0)); } -MaybeObject* SeededNumberDictionary::AtNumberPut(uint32_t key, Object* value) { - UpdateMaxNumberKey(key); - return AtPut(key, value); +Handle<SeededNumberDictionary> SeededNumberDictionary::AtNumberPut( + Handle<SeededNumberDictionary> dictionary, + uint32_t key, + Handle<Object> value) { + dictionary->UpdateMaxNumberKey(key); + return AtPut(dictionary, key, value); } -MaybeObject* UnseededNumberDictionary::AtNumberPut(uint32_t key, - Object* value) { - return AtPut(key, value); +Handle<UnseededNumberDictionary> UnseededNumberDictionary::AtNumberPut( + Handle<UnseededNumberDictionary> dictionary, + uint32_t key, + Handle<Object> value) { + return AtPut(dictionary, key, value); } Handle<SeededNumberDictionary> SeededNumberDictionary::Set( Handle<SeededNumberDictionary> dictionary, - uint32_t index, + uint32_t key, Handle<Object> value, PropertyDetails details) { - CALL_HEAP_FUNCTION(dictionary->GetIsolate(), - dictionary->Set(index, *value, details), - SeededNumberDictionary); -} - - -Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set( - Handle<UnseededNumberDictionary> dictionary, - uint32_t index, - Handle<Object> value) { - CALL_HEAP_FUNCTION(dictionary->GetIsolate(), - dictionary->Set(index, *value), - UnseededNumberDictionary); -} - - -MaybeObject* SeededNumberDictionary::Set(uint32_t key, - Object* value, - PropertyDetails details) { - int entry = FindEntry(key); - if (entry == kNotFound) return AddNumberEntry(key, value, details); + int entry = dictionary->FindEntry(key); + if (entry == kNotFound) { + return AddNumberEntry(dictionary, key, value, details); + } // Preserve enumeration index. 
details = PropertyDetails(details.attributes(), details.type(), - DetailsAt(entry).dictionary_index()); - MaybeObject* maybe_object_key = - SeededNumberDictionaryShape::AsObject(GetHeap(), key); - Object* object_key; - if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key; - SetEntry(entry, object_key, value, details); - return this; + dictionary->DetailsAt(entry).dictionary_index()); + Handle<Object> object_key = + SeededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key); + dictionary->SetEntry(entry, object_key, value, details); + return dictionary; } -MaybeObject* UnseededNumberDictionary::Set(uint32_t key, - Object* value) { - int entry = FindEntry(key); - if (entry == kNotFound) return AddNumberEntry(key, value); - MaybeObject* maybe_object_key = - UnseededNumberDictionaryShape::AsObject(GetHeap(), key); - Object* object_key; - if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key; - SetEntry(entry, object_key, value); - return this; +Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set( + Handle<UnseededNumberDictionary> dictionary, + uint32_t key, + Handle<Object> value) { + int entry = dictionary->FindEntry(key); + if (entry == kNotFound) return AddNumberEntry(dictionary, key, value); + Handle<Object> object_key = + UnseededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key); + dictionary->SetEntry(entry, object_key, value); + return dictionary; } -template<typename Shape, typename Key> -int Dictionary<Shape, Key>::NumberOfElementsFilterAttributes( +template<typename Derived, typename Shape, typename Key> +int Dictionary<Derived, Shape, Key>::NumberOfElementsFilterAttributes( PropertyAttributes filter) { - int capacity = HashTable<Shape, Key>::Capacity(); + int capacity = DerivedHashTable::Capacity(); int result = 0; for (int i = 0; i < capacity; i++) { - Object* k = HashTable<Shape, Key>::KeyAt(i); - if (HashTable<Shape, Key>::IsKey(k) && !FilterKey(k, filter)) { + Object* k = DerivedHashTable::KeyAt(i); + if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) { PropertyDetails details = DetailsAt(i); if (details.IsDeleted()) continue; PropertyAttributes attr = details.attributes(); @@ -15410,31 +15982,31 @@ int Dictionary<Shape, Key>::NumberOfElementsFilterAttributes( } -template<typename Shape, typename Key> -int Dictionary<Shape, Key>::NumberOfEnumElements() { +template<typename Derived, typename Shape, typename Key> +int Dictionary<Derived, Shape, Key>::NumberOfEnumElements() { return NumberOfElementsFilterAttributes( static_cast<PropertyAttributes>(DONT_ENUM | SYMBOLIC)); } -template<typename Shape, typename Key> -void Dictionary<Shape, Key>::CopyKeysTo( +template<typename Derived, typename Shape, typename Key> +void Dictionary<Derived, Shape, Key>::CopyKeysTo( FixedArray* storage, PropertyAttributes filter, - typename Dictionary<Shape, Key>::SortMode sort_mode) { + typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) { ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter)); - int capacity = HashTable<Shape, Key>::Capacity(); + int capacity = DerivedHashTable::Capacity(); int index = 0; for (int i = 0; i < capacity; i++) { - Object* k = HashTable<Shape, Key>::KeyAt(i); - if (HashTable<Shape, Key>::IsKey(k) && !FilterKey(k, filter)) { + Object* k = DerivedHashTable::KeyAt(i); + if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) { PropertyDetails details = DetailsAt(i); if (details.IsDeleted()) continue; PropertyAttributes attr = details.attributes(); if ((attr & filter) == 0) 
storage->set(index++, k); } } - if (sort_mode == Dictionary<Shape, Key>::SORTED) { + if (sort_mode == Dictionary::SORTED) { storage->SortPairs(storage, index); } ASSERT(storage->length() >= index); @@ -15476,24 +16048,24 @@ void NameDictionary::CopyEnumKeysTo(FixedArray* storage) { } -template<typename Shape, typename Key> -void Dictionary<Shape, Key>::CopyKeysTo( +template<typename Derived, typename Shape, typename Key> +void Dictionary<Derived, Shape, Key>::CopyKeysTo( FixedArray* storage, int index, PropertyAttributes filter, - typename Dictionary<Shape, Key>::SortMode sort_mode) { + typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) { ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter)); - int capacity = HashTable<Shape, Key>::Capacity(); + int capacity = DerivedHashTable::Capacity(); for (int i = 0; i < capacity; i++) { - Object* k = HashTable<Shape, Key>::KeyAt(i); - if (HashTable<Shape, Key>::IsKey(k) && !FilterKey(k, filter)) { + Object* k = DerivedHashTable::KeyAt(i); + if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) { PropertyDetails details = DetailsAt(i); if (details.IsDeleted()) continue; PropertyAttributes attr = details.attributes(); if ((attr & filter) == 0) storage->set(index++, k); } } - if (sort_mode == Dictionary<Shape, Key>::SORTED) { + if (sort_mode == Dictionary::SORTED) { storage->SortPairs(storage, index); } ASSERT(storage->length() >= index); @@ -15501,12 +16073,12 @@ void Dictionary<Shape, Key>::CopyKeysTo( // Backwards lookup (slow). -template<typename Shape, typename Key> -Object* Dictionary<Shape, Key>::SlowReverseLookup(Object* value) { - int capacity = HashTable<Shape, Key>::Capacity(); +template<typename Derived, typename Shape, typename Key> +Object* Dictionary<Derived, Shape, Key>::SlowReverseLookup(Object* value) { + int capacity = DerivedHashTable::Capacity(); for (int i = 0; i < capacity; i++) { - Object* k = HashTable<Shape, Key>::KeyAt(i); - if (Dictionary<Shape, Key>::IsKey(k)) { + Object* k = DerivedHashTable::KeyAt(i); + if (Dictionary::IsKey(k)) { Object* e = ValueAt(i); if (e->IsPropertyCell()) { e = PropertyCell::cast(e)->value(); @@ -15514,344 +16086,608 @@ Object* Dictionary<Shape, Key>::SlowReverseLookup(Object* value) { if (e == value) return k; } } - Heap* heap = Dictionary<Shape, Key>::GetHeap(); + Heap* heap = Dictionary::GetHeap(); return heap->undefined_value(); } -MaybeObject* NameDictionary::TransformPropertiesToFastFor( - JSObject* obj, int unused_property_fields) { - // Make sure we preserve dictionary representation if there are too many - // descriptors. - int number_of_elements = NumberOfElements(); - if (number_of_elements > kMaxNumberOfDescriptors) return obj; +Object* ObjectHashTable::Lookup(Handle<Object> key) { + DisallowHeapAllocation no_gc; + ASSERT(IsKey(*key)); - if (number_of_elements != NextEnumerationIndex()) { - MaybeObject* maybe_result = GenerateNewEnumerationIndices(); - if (maybe_result->IsFailure()) return maybe_result; + // If the object does not have an identity hash, it was never used as a key. 
+ Object* hash = key->GetHash(); + if (hash->IsUndefined()) { + return GetHeap()->the_hole_value(); } + int entry = FindEntry(key); + if (entry == kNotFound) return GetHeap()->the_hole_value(); + return get(EntryToIndex(entry) + 1); +} - int instance_descriptor_length = 0; - int number_of_fields = 0; - Heap* heap = GetHeap(); +Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table, + Handle<Object> key, + Handle<Object> value) { + ASSERT(table->IsKey(*key)); - // Compute the length of the instance descriptor. - int capacity = Capacity(); - for (int i = 0; i < capacity; i++) { - Object* k = KeyAt(i); - if (IsKey(k)) { - Object* value = ValueAt(i); - PropertyType type = DetailsAt(i).type(); - ASSERT(type != FIELD); - instance_descriptor_length++; - if (type == NORMAL && !value->IsJSFunction()) { - number_of_fields += 1; - } - } - } + Isolate* isolate = table->GetIsolate(); - int inobject_props = obj->map()->inobject_properties(); + // Make sure the key object has an identity hash code. + Handle<Object> hash = Object::GetOrCreateHash(key, isolate); - // Allocate new map. - Map* new_map; - MaybeObject* maybe_new_map = obj->map()->CopyDropDescriptors(); - if (!maybe_new_map->To(&new_map)) return maybe_new_map; - new_map->set_dictionary_map(false); + int entry = table->FindEntry(key); - if (instance_descriptor_length == 0) { - ASSERT_LE(unused_property_fields, inobject_props); - // Transform the object. - new_map->set_unused_property_fields(inobject_props); - obj->set_map(new_map); - obj->set_properties(heap->empty_fixed_array()); - // Check that it really works. - ASSERT(obj->HasFastProperties()); - return obj; + // Check whether to perform removal operation. + if (value->IsTheHole()) { + if (entry == kNotFound) return table; + table->RemoveEntry(entry); + return Shrink(table, key); } - // Allocate the instance descriptor. - DescriptorArray* descriptors; - MaybeObject* maybe_descriptors = - DescriptorArray::Allocate(GetIsolate(), instance_descriptor_length); - if (!maybe_descriptors->To(&descriptors)) { - return maybe_descriptors; + // Key is already in table, just overwrite value. + if (entry != kNotFound) { + table->set(EntryToIndex(entry) + 1, *value); + return table; } - DescriptorArray::WhitenessWitness witness(descriptors); + // Check whether the hash table should be extended. + table = EnsureCapacity(table, 1, key); + table->AddEntry(table->FindInsertionEntry(Handle<Smi>::cast(hash)->value()), + *key, + *value); + return table; +} - int number_of_allocated_fields = - number_of_fields + unused_property_fields - inobject_props; - if (number_of_allocated_fields < 0) { - // There is enough inobject space for all fields (including unused). - number_of_allocated_fields = 0; - unused_property_fields = inobject_props - number_of_fields; - } - // Allocate the fixed array for the fields. - FixedArray* fields; - MaybeObject* maybe_fields = - heap->AllocateFixedArray(number_of_allocated_fields); - if (!maybe_fields->To(&fields)) return maybe_fields; +void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) { + set(EntryToIndex(entry), key); + set(EntryToIndex(entry) + 1, value); + ElementAdded(); +} - // Fill in the instance descriptor and the fields. - int current_offset = 0; - for (int i = 0; i < capacity; i++) { - Object* k = KeyAt(i); - if (IsKey(k)) { - Object* value = ValueAt(i); - Name* key; - if (k->IsSymbol()) { - key = Symbol::cast(k); - } else { - // Ensure the key is a unique name before writing into the - // instance descriptor. 
- MaybeObject* maybe_key = heap->InternalizeString(String::cast(k)); - if (!maybe_key->To(&key)) return maybe_key; - } - PropertyDetails details = DetailsAt(i); - int enumeration_index = details.dictionary_index(); - PropertyType type = details.type(); - - if (value->IsJSFunction()) { - ConstantDescriptor d(key, value, details.attributes()); - descriptors->Set(enumeration_index - 1, &d, witness); - } else if (type == NORMAL) { - if (current_offset < inobject_props) { - obj->InObjectPropertyAtPut(current_offset, - value, - UPDATE_WRITE_BARRIER); - } else { - int offset = current_offset - inobject_props; - fields->set(offset, value); - } - FieldDescriptor d(key, - current_offset++, - details.attributes(), - // TODO(verwaest): value->OptimalRepresentation(); - Representation::Tagged()); - descriptors->Set(enumeration_index - 1, &d, witness); - } else if (type == CALLBACKS) { - CallbacksDescriptor d(key, - value, - details.attributes()); - descriptors->Set(enumeration_index - 1, &d, witness); - } else { - UNREACHABLE(); - } - } - } - ASSERT(current_offset == number_of_fields); +void ObjectHashTable::RemoveEntry(int entry) { + set_the_hole(EntryToIndex(entry)); + set_the_hole(EntryToIndex(entry) + 1); + ElementRemoved(); +} - descriptors->Sort(); - new_map->InitializeDescriptors(descriptors); - new_map->set_unused_property_fields(unused_property_fields); +Object* WeakHashTable::Lookup(Handle<Object> key) { + DisallowHeapAllocation no_gc; + ASSERT(IsKey(*key)); + int entry = FindEntry(key); + if (entry == kNotFound) return GetHeap()->the_hole_value(); + return get(EntryToValueIndex(entry)); +} - // Transform the object. - obj->set_map(new_map); - obj->set_properties(fields); - ASSERT(obj->IsJSObject()); +Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table, + Handle<Object> key, + Handle<Object> value) { + ASSERT(table->IsKey(*key)); + int entry = table->FindEntry(key); + // Key is already in table, just overwrite value. + if (entry != kNotFound) { + table->set(EntryToValueIndex(entry), *value); + return table; + } - // Check that it really works. - ASSERT(obj->HasFastProperties()); + // Check whether the hash table should be extended. + table = EnsureCapacity(table, 1, key, TENURED); - return obj; + table->AddEntry(table->FindInsertionEntry(table->Hash(key)), key, value); + return table; } -Handle<ObjectHashSet> ObjectHashSet::EnsureCapacity( - Handle<ObjectHashSet> table, - int n, - Handle<Object> key, - PretenureFlag pretenure) { - Handle<HashTable<ObjectHashTableShape<1>, Object*> > table_base = table; - CALL_HEAP_FUNCTION(table_base->GetIsolate(), - table_base->EnsureCapacity(n, *key, pretenure), - ObjectHashSet); +void WeakHashTable::AddEntry(int entry, + Handle<Object> key, + Handle<Object> value) { + DisallowHeapAllocation no_allocation; + set(EntryToIndex(entry), *key); + set(EntryToValueIndex(entry), *value); + ElementAdded(); } -Handle<ObjectHashSet> ObjectHashSet::Shrink(Handle<ObjectHashSet> table, - Handle<Object> key) { - Handle<HashTable<ObjectHashTableShape<1>, Object*> > table_base = table; - CALL_HEAP_FUNCTION(table_base->GetIsolate(), - table_base->Shrink(*key), - ObjectHashSet); +template<class Derived, class Iterator, int entrysize> +Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Allocate( + Isolate* isolate, int capacity, PretenureFlag pretenure) { + // Capacity must be a power of two, since we depend on being able + // to divide and multiple by 2 (kLoadFactor) to derive capacity + // from number of buckets. 
If we decide to change kLoadFactor + // to something other than 2, capacity should be stored as another + // field of this object. + capacity = RoundUpToPowerOf2(Max(kMinCapacity, capacity)); + if (capacity > kMaxCapacity) { + v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true); + } + int num_buckets = capacity / kLoadFactor; + Handle<FixedArray> backing_store = isolate->factory()->NewFixedArray( + kHashTableStartIndex + num_buckets + (capacity * kEntrySize), pretenure); + backing_store->set_map_no_write_barrier( + isolate->heap()->ordered_hash_table_map()); + Handle<Derived> table = Handle<Derived>::cast(backing_store); + for (int i = 0; i < num_buckets; ++i) { + table->set(kHashTableStartIndex + i, Smi::FromInt(kNotFound)); + } + table->SetNumberOfBuckets(num_buckets); + table->SetNumberOfElements(0); + table->SetNumberOfDeletedElements(0); + table->set_iterators(isolate->heap()->undefined_value()); + return table; } -bool ObjectHashSet::Contains(Object* key) { - ASSERT(IsKey(key)); +template<class Derived, class Iterator, int entrysize> +Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::EnsureGrowable( + Handle<Derived> table) { + int nof = table->NumberOfElements(); + int nod = table->NumberOfDeletedElements(); + int capacity = table->Capacity(); + if ((nof + nod) < capacity) return table; + // Don't need to grow if we can simply clear out deleted entries instead. + // Note that we can't compact in place, though, so we always allocate + // a new table. + return Rehash(table, (nod < (capacity >> 1)) ? capacity << 1 : capacity); +} - // If the object does not have an identity hash, it was never used as a key. - Object* hash = key->GetHash(); - if (hash->IsUndefined()) return false; - return (FindEntry(key) != kNotFound); +template<class Derived, class Iterator, int entrysize> +Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Shrink( + Handle<Derived> table) { + int nof = table->NumberOfElements(); + int capacity = table->Capacity(); + if (nof > (capacity >> 2)) return table; + return Rehash(table, capacity / 2); } -Handle<ObjectHashSet> ObjectHashSet::Add(Handle<ObjectHashSet> table, - Handle<Object> key) { - ASSERT(table->IsKey(*key)); - - // Make sure the key object has an identity hash code. - Handle<Object> object_hash = Object::GetOrCreateHash(key, - table->GetIsolate()); +template<class Derived, class Iterator, int entrysize> +Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Clear( + Handle<Derived> table) { + Handle<Derived> new_table = + Allocate(table->GetIsolate(), + kMinCapacity, + table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED); - int entry = table->FindEntry(*key); + new_table->set_iterators(table->iterators()); + table->set_iterators(table->GetHeap()->undefined_value()); - // Check whether key is already present. - if (entry != kNotFound) return table; + DisallowHeapAllocation no_allocation; + for (Object* object = new_table->iterators(); + !object->IsUndefined(); + object = Iterator::cast(object)->next_iterator()) { + Iterator::cast(object)->TableCleared(); + Iterator::cast(object)->set_table(*new_table); + } - // Check whether the hash set should be extended and add entry. 
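// --- Illustrative sketch (not part of this commit) ---------------------------
// A minimal standalone model of the OrderedHashTable index arithmetic used in
// Allocate()/Rehash() above: the backing FixedArray holds a small header, then
// num_buckets bucket heads, then capacity entries of `entrysize` payload slots
// plus one chain-link slot each, with num_buckets = capacity / kLoadFactor and
// bucket = hash & (num_buckets - 1). Constant names mirror the diff, but the
// header size below is a placeholder and the class is a simplified model, not
// V8's actual implementation.
#include <cassert>
#include <cstdio>

struct OrderedHashTableModel {
  static const int kLoadFactor = 2;
  static const int kHashTableStartIndex = 3;  // placeholder header size
  int capacity;                               // must be a power of two
  int entrysize;                              // 1 for sets, 2 for maps

  int NumBuckets() const { return capacity / kLoadFactor; }
  int HashToBucket(int hash) const { return hash & (NumBuckets() - 1); }
  int EntryToIndex(int entry) const {
    // Entries follow the bucket heads; each entry is payload + chain link.
    return kHashTableStartIndex + NumBuckets() + entry * (entrysize + 1);
  }
  int BackingStoreLength() const {
    return kHashTableStartIndex + NumBuckets() + capacity * (entrysize + 1);
  }
};

int main() {
  OrderedHashTableModel map_like{16, 2};      // a map-like table, capacity 16
  assert(map_like.NumBuckets() == 8);
  std::printf("entry 3 starts at array index %d, total length %d\n",
              map_like.EntryToIndex(3), map_like.BackingStoreLength());
}
// -----------------------------------------------------------------------------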
- Handle<ObjectHashSet> new_table = - ObjectHashSet::EnsureCapacity(table, 1, key); - entry = new_table->FindInsertionEntry(Smi::cast(*object_hash)->value()); - new_table->set(EntryToIndex(entry), *key); - new_table->ElementAdded(); return new_table; } -Handle<ObjectHashSet> ObjectHashSet::Remove(Handle<ObjectHashSet> table, - Handle<Object> key) { - ASSERT(table->IsKey(*key)); +template<class Derived, class Iterator, int entrysize> +Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash( + Handle<Derived> table, int new_capacity) { + Handle<Derived> new_table = + Allocate(table->GetIsolate(), + new_capacity, + table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED); + int nof = table->NumberOfElements(); + int nod = table->NumberOfDeletedElements(); + int new_buckets = new_table->NumberOfBuckets(); + int new_entry = 0; + for (int old_entry = 0; old_entry < (nof + nod); ++old_entry) { + Object* key = table->KeyAt(old_entry); + if (key->IsTheHole()) continue; + Object* hash = key->GetHash(); + int bucket = Smi::cast(hash)->value() & (new_buckets - 1); + Object* chain_entry = new_table->get(kHashTableStartIndex + bucket); + new_table->set(kHashTableStartIndex + bucket, Smi::FromInt(new_entry)); + int new_index = new_table->EntryToIndex(new_entry); + int old_index = table->EntryToIndex(old_entry); + for (int i = 0; i < entrysize; ++i) { + Object* value = table->get(old_index + i); + new_table->set(new_index + i, value); + } + new_table->set(new_index + kChainOffset, chain_entry); + ++new_entry; + } + new_table->SetNumberOfElements(nof); + + new_table->set_iterators(table->iterators()); + table->set_iterators(table->GetHeap()->undefined_value()); - // If the object does not have an identity hash, it was never used as a key. - if (key->GetHash()->IsUndefined()) return table; + DisallowHeapAllocation no_allocation; + for (Object* object = new_table->iterators(); + !object->IsUndefined(); + object = Iterator::cast(object)->next_iterator()) { + Iterator::cast(object)->TableCompacted(); + Iterator::cast(object)->set_table(*new_table); + } - int entry = table->FindEntry(*key); + return new_table; +} - // Check whether key is actually present. - if (entry == kNotFound) return table; - // Remove entry and try to shrink this hash set. 
- table->set_the_hole(EntryToIndex(entry)); - table->ElementRemoved(); +template<class Derived, class Iterator, int entrysize> +int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry( + Handle<Object> key) { + DisallowHeapAllocation no_gc; + ASSERT(!key->IsTheHole()); + Object* hash = key->GetHash(); + if (hash->IsUndefined()) return kNotFound; + for (int entry = HashToEntry(Smi::cast(hash)->value()); + entry != kNotFound; + entry = ChainAt(entry)) { + Object* candidate = KeyAt(entry); + if (candidate->SameValue(*key)) + return entry; + } + return kNotFound; +} - return ObjectHashSet::Shrink(table, key); + +template<class Derived, class Iterator, int entrysize> +int OrderedHashTable<Derived, Iterator, entrysize>::AddEntry(int hash) { + int entry = UsedCapacity(); + int bucket = HashToBucket(hash); + int index = EntryToIndex(entry); + Object* chain_entry = get(kHashTableStartIndex + bucket); + set(kHashTableStartIndex + bucket, Smi::FromInt(entry)); + set(index + kChainOffset, chain_entry); + SetNumberOfElements(NumberOfElements() + 1); + return index; } -Handle<ObjectHashTable> ObjectHashTable::EnsureCapacity( - Handle<ObjectHashTable> table, - int n, - Handle<Object> key, - PretenureFlag pretenure) { - Handle<HashTable<ObjectHashTableShape<2>, Object*> > table_base = table; - CALL_HEAP_FUNCTION(table_base->GetIsolate(), - table_base->EnsureCapacity(n, *key, pretenure), - ObjectHashTable); +template<class Derived, class Iterator, int entrysize> +void OrderedHashTable<Derived, Iterator, entrysize>::RemoveEntry(int entry) { + int index = EntryToIndex(entry); + for (int i = 0; i < entrysize; ++i) { + set_the_hole(index + i); + } + SetNumberOfElements(NumberOfElements() - 1); + SetNumberOfDeletedElements(NumberOfDeletedElements() + 1); + + DisallowHeapAllocation no_allocation; + for (Object* object = iterators(); + !object->IsUndefined(); + object = Iterator::cast(object)->next_iterator()) { + Iterator::cast(object)->EntryRemoved(entry); + } } -Handle<ObjectHashTable> ObjectHashTable::Shrink( - Handle<ObjectHashTable> table, Handle<Object> key) { - Handle<HashTable<ObjectHashTableShape<2>, Object*> > table_base = table; - CALL_HEAP_FUNCTION(table_base->GetIsolate(), - table_base->Shrink(*key), - ObjectHashTable); +template Handle<OrderedHashSet> +OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Allocate( + Isolate* isolate, int capacity, PretenureFlag pretenure); + +template Handle<OrderedHashSet> +OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::EnsureGrowable( + Handle<OrderedHashSet> table); + +template Handle<OrderedHashSet> +OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Shrink( + Handle<OrderedHashSet> table); + +template Handle<OrderedHashSet> +OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Clear( + Handle<OrderedHashSet> table); + +template int +OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry( + Handle<Object> key); + +template int +OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::AddEntry(int hash); + +template void +OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::RemoveEntry(int entry); + + +template Handle<OrderedHashMap> +OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Allocate( + Isolate* isolate, int capacity, PretenureFlag pretenure); + +template Handle<OrderedHashMap> +OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::EnsureGrowable( + Handle<OrderedHashMap> table); + +template Handle<OrderedHashMap> +OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Shrink( + Handle<OrderedHashMap> table); + +template Handle<OrderedHashMap> 
+OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Clear( + Handle<OrderedHashMap> table); + +template int +OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry( + Handle<Object> key); + +template int +OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::AddEntry(int hash); + +template void +OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::RemoveEntry(int entry); + + +bool OrderedHashSet::Contains(Handle<Object> key) { + return FindEntry(key) != kNotFound; } -Object* ObjectHashTable::Lookup(Object* key) { - ASSERT(IsKey(key)); +Handle<OrderedHashSet> OrderedHashSet::Add(Handle<OrderedHashSet> table, + Handle<Object> key) { + if (table->FindEntry(key) != kNotFound) return table; - // If the object does not have an identity hash, it was never used as a key. - Object* hash = key->GetHash(); - if (hash->IsUndefined()) { - return GetHeap()->the_hole_value(); - } - int entry = FindEntry(key); - if (entry == kNotFound) return GetHeap()->the_hole_value(); - return get(EntryToIndex(entry) + 1); + table = EnsureGrowable(table); + + Handle<Object> hash = GetOrCreateHash(key, table->GetIsolate()); + int index = table->AddEntry(Smi::cast(*hash)->value()); + table->set(index, *key); + return table; } -Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table, - Handle<Object> key, - Handle<Object> value) { - ASSERT(table->IsKey(*key)); +Handle<OrderedHashSet> OrderedHashSet::Remove(Handle<OrderedHashSet> table, + Handle<Object> key) { + int entry = table->FindEntry(key); + if (entry == kNotFound) return table; + table->RemoveEntry(entry); + return Shrink(table); +} - Isolate* isolate = table->GetIsolate(); - // Make sure the key object has an identity hash code. - Handle<Object> hash = Object::GetOrCreateHash(key, isolate); +Object* OrderedHashMap::Lookup(Handle<Object> key) { + DisallowHeapAllocation no_gc; + int entry = FindEntry(key); + if (entry == kNotFound) return GetHeap()->the_hole_value(); + return ValueAt(entry); +} - int entry = table->FindEntry(*key); - // Check whether to perform removal operation. +Handle<OrderedHashMap> OrderedHashMap::Put(Handle<OrderedHashMap> table, + Handle<Object> key, + Handle<Object> value) { + int entry = table->FindEntry(key); + if (value->IsTheHole()) { if (entry == kNotFound) return table; table->RemoveEntry(entry); - return Shrink(table, key); + return Shrink(table); } - // Key is already in table, just overwrite value. if (entry != kNotFound) { - table->set(EntryToIndex(entry) + 1, *value); + table->set(table->EntryToIndex(entry) + kValueOffset, *value); return table; } - // Check whether the hash table should be extended. 
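// --- Illustrative usage sketch (not part of this commit) ---------------------
// The handlified OrderedHashMap/OrderedHashSet API above may reallocate the
// backing store (EnsureGrowable/Shrink return a fresh table), so callers are
// expected to store the returned handle back rather than keep the old one.
// `table`, `key`, `value` and `the_hole` are assumed to be handles already in
// scope; the calls shown are the ones defined in this diff:
//
//   table = OrderedHashMap::Put(table, key, value);     // insert or overwrite
//   table = OrderedHashMap::Put(table, key, the_hole);  // the-hole value removes the key
//   Object* current = table->Lookup(key);               // lookup, no allocation
//
// The same store-the-result pattern applies to OrderedHashSet::Add/Remove and
// to ObjectHashTable::Put earlier in this file.
// -----------------------------------------------------------------------------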
- table = EnsureCapacity(table, 1, key); - table->AddEntry(table->FindInsertionEntry(Handle<Smi>::cast(hash)->value()), - *key, - *value); + table = EnsureGrowable(table); + + Handle<Object> hash = GetOrCreateHash(key, table->GetIsolate()); + int index = table->AddEntry(Smi::cast(*hash)->value()); + table->set(index, *key); + table->set(index + kValueOffset, *value); return table; } -void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) { - set(EntryToIndex(entry), key); - set(EntryToIndex(entry) + 1, value); - ElementAdded(); +template<class Derived, class TableType> +void OrderedHashTableIterator<Derived, TableType>::EntryRemoved(int index) { + int i = this->index()->value(); + if (index < i) { + set_count(Smi::FromInt(count()->value() - 1)); + } + if (index == i) { + Seek(); + } } -void ObjectHashTable::RemoveEntry(int entry) { - set_the_hole(EntryToIndex(entry)); - set_the_hole(EntryToIndex(entry) + 1); - ElementRemoved(); +template<class Derived, class TableType> +void OrderedHashTableIterator<Derived, TableType>::Close() { + if (Closed()) return; + + DisallowHeapAllocation no_allocation; + + Object* undefined = GetHeap()->undefined_value(); + TableType* table = TableType::cast(this->table()); + Object* previous = previous_iterator(); + Object* next = next_iterator(); + + if (previous == undefined) { + ASSERT_EQ(table->iterators(), this); + table->set_iterators(next); + } else { + ASSERT_EQ(Derived::cast(previous)->next_iterator(), this); + Derived::cast(previous)->set_next_iterator(next); + } + + if (!next->IsUndefined()) { + ASSERT_EQ(Derived::cast(next)->previous_iterator(), this); + Derived::cast(next)->set_previous_iterator(previous); + } + + set_previous_iterator(undefined); + set_next_iterator(undefined); + set_table(undefined); } -Object* WeakHashTable::Lookup(Object* key) { - ASSERT(IsKey(key)); - int entry = FindEntry(key); - if (entry == kNotFound) return GetHeap()->the_hole_value(); - return get(EntryToValueIndex(entry)); +template<class Derived, class TableType> +void OrderedHashTableIterator<Derived, TableType>::Seek() { + ASSERT(!Closed()); + + DisallowHeapAllocation no_allocation; + + int index = this->index()->value(); + + TableType* table = TableType::cast(this->table()); + int used_capacity = table->UsedCapacity(); + + while (index < used_capacity && table->KeyAt(index)->IsTheHole()) { + index++; + } + set_index(Smi::FromInt(index)); } -MaybeObject* WeakHashTable::Put(Object* key, Object* value) { - ASSERT(IsKey(key)); - int entry = FindEntry(key); - // Key is already in table, just overwrite value. 
- if (entry != kNotFound) { - set(EntryToValueIndex(entry), value); - return this; +template<class Derived, class TableType> +void OrderedHashTableIterator<Derived, TableType>::MoveNext() { + ASSERT(!Closed()); + + set_index(Smi::FromInt(index()->value() + 1)); + set_count(Smi::FromInt(count()->value() + 1)); + Seek(); +} + + +template<class Derived, class TableType> +Handle<JSObject> OrderedHashTableIterator<Derived, TableType>::Next( + Handle<Derived> iterator) { + Isolate* isolate = iterator->GetIsolate(); + Factory* factory = isolate->factory(); + + Handle<Object> object(iterator->table(), isolate); + + if (!object->IsUndefined()) { + Handle<TableType> table = Handle<TableType>::cast(object); + int index = iterator->index()->value(); + if (index < table->UsedCapacity()) { + int entry_index = table->EntryToIndex(index); + iterator->MoveNext(); + Handle<Object> value = Derived::ValueForKind(iterator, entry_index); + return factory->NewIteratorResultObject(value, false); + } else { + iterator->Close(); + } } - // Check whether the hash table should be extended. - Object* obj; - { MaybeObject* maybe_obj = EnsureCapacity(1, key, TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + return factory->NewIteratorResultObject(factory->undefined_value(), true); +} + + +template<class Derived, class TableType> +Handle<Derived> OrderedHashTableIterator<Derived, TableType>::CreateInternal( + Handle<Map> map, + Handle<TableType> table, + int kind) { + Isolate* isolate = table->GetIsolate(); + + Handle<Object> undefined = isolate->factory()->undefined_value(); + + Handle<Derived> new_iterator = Handle<Derived>::cast( + isolate->factory()->NewJSObjectFromMap(map)); + new_iterator->set_previous_iterator(*undefined); + new_iterator->set_table(*table); + new_iterator->set_index(Smi::FromInt(0)); + new_iterator->set_count(Smi::FromInt(0)); + new_iterator->set_kind(Smi::FromInt(kind)); + + Handle<Object> old_iterator(table->iterators(), isolate); + if (!old_iterator->IsUndefined()) { + Handle<Derived>::cast(old_iterator)->set_previous_iterator(*new_iterator); + new_iterator->set_next_iterator(*old_iterator); + } else { + new_iterator->set_next_iterator(*undefined); } - WeakHashTable* table = WeakHashTable::cast(obj); - table->AddEntry(table->FindInsertionEntry(Hash(key)), key, value); - return table; + + table->set_iterators(*new_iterator); + + return new_iterator; } -void WeakHashTable::AddEntry(int entry, Object* key, Object* value) { - set(EntryToIndex(entry), key); - set(EntryToValueIndex(entry), value); - ElementAdded(); +template void +OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::EntryRemoved( + int index); + +template void +OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Close(); + +template Handle<JSObject> +OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Next( + Handle<JSSetIterator> iterator); + +template Handle<JSSetIterator> +OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::CreateInternal( + Handle<Map> map, Handle<OrderedHashSet> table, int kind); + + +template void +OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::EntryRemoved( + int index); + +template void +OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Close(); + +template Handle<JSObject> +OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Next( + Handle<JSMapIterator> iterator); + +template Handle<JSMapIterator> +OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::CreateInternal( + Handle<Map> map, Handle<OrderedHashMap> table, int kind); + + +Handle<Object> 
JSSetIterator::ValueForKind( + Handle<JSSetIterator> iterator, int entry_index) { + int kind = iterator->kind()->value(); + // Set.prototype only has values and entries. + ASSERT(kind == kKindValues || kind == kKindEntries); + + Isolate* isolate = iterator->GetIsolate(); + Factory* factory = isolate->factory(); + + Handle<OrderedHashSet> table( + OrderedHashSet::cast(iterator->table()), isolate); + Handle<Object> value = Handle<Object>(table->get(entry_index), isolate); + + if (kind == kKindEntries) { + Handle<FixedArray> array = factory->NewFixedArray(2); + array->set(0, *value); + array->set(1, *value); + return factory->NewJSArrayWithElements(array); + } + + return value; +} + + +Handle<Object> JSMapIterator::ValueForKind( + Handle<JSMapIterator> iterator, int entry_index) { + int kind = iterator->kind()->value(); + ASSERT(kind == kKindKeys || kind == kKindValues || kind == kKindEntries); + + Isolate* isolate = iterator->GetIsolate(); + Factory* factory = isolate->factory(); + + Handle<OrderedHashMap> table( + OrderedHashMap::cast(iterator->table()), isolate); + + switch (kind) { + case kKindKeys: + return Handle<Object>(table->get(entry_index), isolate); + + case kKindValues: + return Handle<Object>(table->get(entry_index + 1), isolate); + + case kKindEntries: { + Handle<Object> key(table->get(entry_index), isolate); + Handle<Object> value(table->get(entry_index + 1), isolate); + Handle<FixedArray> array = factory->NewFixedArray(2); + array->set(0, *key); + array->set(1, *value); + return factory->NewJSArrayWithElements(array); + } + } + + UNREACHABLE(); + return factory->undefined_value(); } @@ -15907,7 +16743,6 @@ Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create( } -#ifdef ENABLE_DEBUGGER_SUPPORT // Check if there is a break point at this code position. bool DebugInfo::HasBreakPoint(int code_position) { // Get the break point info object for this code position. @@ -16161,7 +16996,6 @@ int BreakPointInfo::GetBreakPointCount() { // Multiple break points. 
return FixedArray::cast(break_point_objects())->length(); } -#endif // ENABLE_DEBUGGER_SUPPORT Object* JSDate::GetField(Object* object, Smi* index) { @@ -16342,6 +17176,10 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer( ASSERT(IsFixedTypedArrayElementsKind(map->elements_kind())); + Handle<Map> new_map = Map::TransitionElementsTo( + map, + FixedToExternalElementsKind(map->elements_kind())); + Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer(); Handle<FixedTypedArrayBase> fixed_typed_array( FixedTypedArrayBase::cast(typed_array->elements())); @@ -16354,14 +17192,11 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer( isolate->factory()->NewExternalArray( fixed_typed_array->length(), typed_array->type(), static_cast<uint8_t*>(buffer->backing_store())); - Handle<Map> new_map = JSObject::GetElementsTransitionMap( - typed_array, - FixedToExternalElementsKind(map->elements_kind())); buffer->set_weak_first_view(*typed_array); ASSERT(typed_array->weak_next() == isolate->heap()->undefined_value()); typed_array->set_buffer(*buffer); - typed_array->set_map_and_elements(*new_map, *new_elements); + JSObject::SetMapAndElements(typed_array, new_map, new_elements); return buffer; } @@ -16423,14 +17258,16 @@ void PropertyCell::SetValueInferType(Handle<PropertyCell> cell, } -void PropertyCell::AddDependentCompilationInfo(CompilationInfo* info) { - Handle<DependentCode> dep(dependent_code()); +// static +void PropertyCell::AddDependentCompilationInfo(Handle<PropertyCell> cell, + CompilationInfo* info) { Handle<DependentCode> codes = - DependentCode::Insert(dep, DependentCode::kPropertyCellChangedGroup, + DependentCode::Insert(handle(cell->dependent_code(), info->isolate()), + DependentCode::kPropertyCellChangedGroup, info->object_wrapper()); - if (*codes != dependent_code()) set_dependent_code(*codes); + if (*codes != cell->dependent_code()) cell->set_dependent_code(*codes); info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add( - Handle<HeapObject>(this), info->zone()); + cell, info->zone()); } diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h index e3ed08c4d..12f2ee0e5 100644 --- a/deps/v8/src/objects.h +++ b/deps/v8/src/objects.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_OBJECTS_H_ #define V8_OBJECTS_H_ @@ -52,111 +29,113 @@ // Most object types in the V8 JavaScript are described in this file. // // Inheritance hierarchy: -// - MaybeObject (an object or a failure) -// - Failure (immediate for marking failed operation) -// - Object -// - Smi (immediate small integer) -// - HeapObject (superclass for everything allocated in the heap) -// - JSReceiver (suitable for property access) -// - JSObject -// - JSArray -// - JSArrayBuffer -// - JSArrayBufferView -// - JSTypedArray -// - JSDataView -// - JSSet -// - JSMap -// - JSWeakCollection -// - JSWeakMap -// - JSWeakSet -// - JSRegExp -// - JSFunction -// - JSGeneratorObject -// - JSModule -// - GlobalObject -// - JSGlobalObject -// - JSBuiltinsObject -// - JSGlobalProxy -// - JSValue -// - JSDate -// - JSMessageObject -// - JSProxy -// - JSFunctionProxy -// - FixedArrayBase -// - ByteArray -// - FixedArray -// - DescriptorArray -// - HashTable -// - Dictionary -// - StringTable -// - CompilationCacheTable -// - CodeCacheHashTable -// - MapCache -// - Context -// - JSFunctionResultCache -// - ScopeInfo -// - TransitionArray -// - FixedDoubleArray -// - ExternalArray -// - ExternalUint8ClampedArray -// - ExternalInt8Array -// - ExternalUint8Array -// - ExternalInt16Array -// - ExternalUint16Array -// - ExternalInt32Array -// - ExternalUint32Array -// - ExternalFloat32Array -// - Name -// - String -// - SeqString -// - SeqOneByteString -// - SeqTwoByteString -// - SlicedString -// - ConsString -// - ExternalString -// - ExternalAsciiString -// - ExternalTwoByteString -// - InternalizedString -// - SeqInternalizedString -// - SeqOneByteInternalizedString -// - SeqTwoByteInternalizedString -// - ConsInternalizedString -// - ExternalInternalizedString -// - ExternalAsciiInternalizedString -// - ExternalTwoByteInternalizedString -// - Symbol -// - HeapNumber -// - Cell -// - PropertyCell -// - Code -// - Map -// - Oddball -// - Foreign -// - SharedFunctionInfo -// - Struct -// - Box -// - DeclaredAccessorDescriptor -// - AccessorInfo -// - DeclaredAccessorInfo -// - ExecutableAccessorInfo -// - AccessorPair -// - AccessCheckInfo -// - InterceptorInfo -// - CallHandlerInfo -// - TemplateInfo -// - FunctionTemplateInfo -// - ObjectTemplateInfo -// - Script -// - SignatureInfo -// - TypeSwitchInfo -// - DebugInfo -// - BreakPointInfo -// - CodeCache +// - Object +// - Smi (immediate small integer) +// - HeapObject (superclass for everything allocated in the heap) +// - JSReceiver (suitable for property access) +// - JSObject +// - JSArray +// - JSArrayBuffer +// - JSArrayBufferView +// - JSTypedArray +// - JSDataView +// - JSSet +// - JSMap +// - JSSetIterator +// - JSMapIterator +// - JSWeakCollection +// - JSWeakMap +// - JSWeakSet +// - JSRegExp +// - JSFunction +// - JSGeneratorObject +// - JSModule +// - GlobalObject +// - JSGlobalObject +// - JSBuiltinsObject +// - 
JSGlobalProxy +// - JSValue +// - JSDate +// - JSMessageObject +// - JSProxy +// - JSFunctionProxy +// - FixedArrayBase +// - ByteArray +// - FixedArray +// - DescriptorArray +// - HashTable +// - Dictionary +// - StringTable +// - CompilationCacheTable +// - CodeCacheHashTable +// - MapCache +// - OrderedHashTable +// - OrderedHashSet +// - OrderedHashMap +// - Context +// - JSFunctionResultCache +// - ScopeInfo +// - TransitionArray +// - FixedDoubleArray +// - ExternalArray +// - ExternalUint8ClampedArray +// - ExternalInt8Array +// - ExternalUint8Array +// - ExternalInt16Array +// - ExternalUint16Array +// - ExternalInt32Array +// - ExternalUint32Array +// - ExternalFloat32Array +// - Name +// - String +// - SeqString +// - SeqOneByteString +// - SeqTwoByteString +// - SlicedString +// - ConsString +// - ExternalString +// - ExternalAsciiString +// - ExternalTwoByteString +// - InternalizedString +// - SeqInternalizedString +// - SeqOneByteInternalizedString +// - SeqTwoByteInternalizedString +// - ConsInternalizedString +// - ExternalInternalizedString +// - ExternalAsciiInternalizedString +// - ExternalTwoByteInternalizedString +// - Symbol +// - HeapNumber +// - Cell +// - PropertyCell +// - Code +// - Map +// - Oddball +// - Foreign +// - SharedFunctionInfo +// - Struct +// - Box +// - DeclaredAccessorDescriptor +// - AccessorInfo +// - DeclaredAccessorInfo +// - ExecutableAccessorInfo +// - AccessorPair +// - AccessCheckInfo +// - InterceptorInfo +// - CallHandlerInfo +// - TemplateInfo +// - FunctionTemplateInfo +// - ObjectTemplateInfo +// - Script +// - SignatureInfo +// - TypeSwitchInfo +// - DebugInfo +// - BreakPointInfo +// - CodeCache // // Formats of Object*: // Smi: [31 bit signed int] 0 // HeapObject: [32 bit direct pointer] (4 byte aligned) | 01 -// Failure: [30 bit signed int] 11 namespace v8 { namespace internal { @@ -355,8 +334,6 @@ const int kStubMinorKeyBits = kBitsPerInt - kSmiTagSize - kStubMajorKeyBits; \ V(INTERNALIZED_STRING_TYPE) \ V(ASCII_INTERNALIZED_STRING_TYPE) \ - V(CONS_INTERNALIZED_STRING_TYPE) \ - V(CONS_ASCII_INTERNALIZED_STRING_TYPE) \ V(EXTERNAL_INTERNALIZED_STRING_TYPE) \ V(EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE) \ V(EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE) \ @@ -444,6 +421,8 @@ const int kStubMinorKeyBits = kBitsPerInt - kSmiTagSize - kStubMajorKeyBits; V(JS_PROXY_TYPE) \ V(JS_SET_TYPE) \ V(JS_MAP_TYPE) \ + V(JS_SET_ITERATOR_TYPE) \ + V(JS_MAP_ITERATOR_TYPE) \ V(JS_WEAK_MAP_TYPE) \ V(JS_WEAK_SET_TYPE) \ V(JS_REGEXP_TYPE) \ @@ -514,14 +493,6 @@ const int kStubMinorKeyBits = kBitsPerInt - kSmiTagSize - kStubMajorKeyBits; kVariableSizeSentinel, \ ascii_internalized_string, \ AsciiInternalizedString) \ - V(CONS_INTERNALIZED_STRING_TYPE, \ - ConsString::kSize, \ - cons_internalized_string, \ - ConsInternalizedString) \ - V(CONS_ASCII_INTERNALIZED_STRING_TYPE, \ - ConsString::kSize, \ - cons_ascii_internalized_string, \ - ConsAsciiInternalizedString) \ V(EXTERNAL_INTERNALIZED_STRING_TYPE, \ ExternalTwoByteString::kSize, \ external_internalized_string, \ @@ -556,7 +527,7 @@ const int kStubMinorKeyBits = kBitsPerInt - kSmiTagSize - kStubMajorKeyBits; // Note that for subtle reasons related to the ordering or numerical values of // type tags, elements in this list have to be added to the INSTANCE_TYPE_LIST // manually. 
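// --- Illustrative sketch (not part of this commit) ---------------------------
// Lists such as INSTANCE_TYPE_LIST and STRUCT_LIST in this header are
// "X macros": a single list of V(...) entries that is expanded several times
// with different definitions of V to generate enums, type predicates,
// printers, and so on, which is why new entries must be added to each list by
// hand. A minimal standalone version of the pattern (all names made up):
#include <cstdio>

#define DEMO_STRUCT_LIST(V)            \
  V(BOX, Box, box)                     \
  V(SCRIPT, Script, script)            \
  V(DEBUG_INFO, DebugInfo, debug_info)

// Expansion 1: an enum of instance types.
enum DemoInstanceType {
#define DEFINE_TYPE(NAME, Name, name) DEMO_##NAME##_TYPE,
  DEMO_STRUCT_LIST(DEFINE_TYPE)
#undef DEFINE_TYPE
};

// Expansion 2: printable class names generated from the same list.
static const char* const kDemoTypeNames[] = {
#define DEFINE_NAME(NAME, Name, name) #Name,
  DEMO_STRUCT_LIST(DEFINE_NAME)
#undef DEFINE_NAME
};

int main() {
  std::printf("%s has type id %d\n", kDemoTypeNames[DEMO_SCRIPT_TYPE],
              static_cast<int>(DEMO_SCRIPT_TYPE));
}
// -----------------------------------------------------------------------------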
-#define STRUCT_LIST_ALL(V) \ +#define STRUCT_LIST(V) \ V(BOX, Box, box) \ V(DECLARED_ACCESSOR_DESCRIPTOR, \ DeclaredAccessorDescriptor, \ @@ -577,19 +548,9 @@ const int kStubMinorKeyBits = kBitsPerInt - kSmiTagSize - kStubMajorKeyBits; V(CODE_CACHE, CodeCache, code_cache) \ V(POLYMORPHIC_CODE_CACHE, PolymorphicCodeCache, polymorphic_code_cache) \ V(TYPE_FEEDBACK_INFO, TypeFeedbackInfo, type_feedback_info) \ - V(ALIASED_ARGUMENTS_ENTRY, AliasedArgumentsEntry, aliased_arguments_entry) - -#ifdef ENABLE_DEBUGGER_SUPPORT -#define STRUCT_LIST_DEBUGGER(V) \ + V(ALIASED_ARGUMENTS_ENTRY, AliasedArgumentsEntry, aliased_arguments_entry) \ V(DEBUG_INFO, DebugInfo, debug_info) \ V(BREAK_POINT_INFO, BreakPointInfo, break_point_info) -#else -#define STRUCT_LIST_DEBUGGER(V) -#endif - -#define STRUCT_LIST(V) \ - STRUCT_LIST_ALL(V) \ - STRUCT_LIST_DEBUGGER(V) // We use the full 8 bits of the instance_type field to encode heap object // instance types. The high-order bit (bit 7) is set if the object is not a @@ -631,7 +592,7 @@ STATIC_ASSERT( // Use this mask to distinguish between cons and slice only after making // sure that the string is one of the two (an indirect string). const uint32_t kSlicedNotConsMask = kSlicedStringTag & ~kConsStringTag; -STATIC_ASSERT(IS_POWER_OF_TWO(kSlicedNotConsMask) && kSlicedNotConsMask != 0); +STATIC_ASSERT(IS_POWER_OF_TWO(kSlicedNotConsMask)); // If bit 7 is clear, then bit 3 indicates whether this two-byte // string actually contains one byte data. @@ -662,10 +623,6 @@ enum InstanceType { | kInternalizedTag, ASCII_INTERNALIZED_STRING_TYPE = kOneByteStringTag | kSeqStringTag | kInternalizedTag, - CONS_INTERNALIZED_STRING_TYPE = kTwoByteStringTag | kConsStringTag - | kInternalizedTag, - CONS_ASCII_INTERNALIZED_STRING_TYPE = kOneByteStringTag | kConsStringTag - | kInternalizedTag, EXTERNAL_INTERNALIZED_STRING_TYPE = kTwoByteStringTag | kExternalStringTag | kInternalizedTag, EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE = kOneByteStringTag @@ -685,9 +642,9 @@ enum InstanceType { STRING_TYPE = INTERNALIZED_STRING_TYPE | kNotInternalizedTag, ASCII_STRING_TYPE = ASCII_INTERNALIZED_STRING_TYPE | kNotInternalizedTag, - CONS_STRING_TYPE = CONS_INTERNALIZED_STRING_TYPE | kNotInternalizedTag, + CONS_STRING_TYPE = kTwoByteStringTag | kConsStringTag | kNotInternalizedTag, CONS_ASCII_STRING_TYPE = - CONS_ASCII_INTERNALIZED_STRING_TYPE | kNotInternalizedTag, + kOneByteStringTag | kConsStringTag | kNotInternalizedTag, SLICED_STRING_TYPE = kTwoByteStringTag | kSlicedStringTag | kNotInternalizedTag, @@ -768,10 +725,6 @@ enum InstanceType { TYPE_FEEDBACK_INFO_TYPE, ALIASED_ARGUMENTS_ENTRY_TYPE, BOX_TYPE, - // The following two instance types are only used when ENABLE_DEBUGGER_SUPPORT - // is defined. However as include/v8.h contain some of the instance type - // constants always having them avoids them getting different numbers - // depending on whether ENABLE_DEBUGGER_SUPPORT is defined or not. DEBUG_INFO_TYPE, BREAK_POINT_INFO_TYPE, @@ -779,8 +732,6 @@ enum InstanceType { CONSTANT_POOL_ARRAY_TYPE, SHARED_FUNCTION_INFO_TYPE, - JS_MESSAGE_OBJECT_TYPE, - // All the following types are subtypes of JSReceiver, which corresponds to // objects in the JS sense. The first and the last type in this range are // the two forms of function. 
This organization enables using the same @@ -790,6 +741,7 @@ enum InstanceType { JS_PROXY_TYPE, // LAST_JS_PROXY_TYPE JS_VALUE_TYPE, // FIRST_JS_OBJECT_TYPE + JS_MESSAGE_OBJECT_TYPE, JS_DATE_TYPE, JS_OBJECT_TYPE, JS_CONTEXT_EXTENSION_OBJECT_TYPE, @@ -804,6 +756,8 @@ enum InstanceType { JS_DATA_VIEW_TYPE, JS_SET_TYPE, JS_MAP_TYPE, + JS_SET_ITERATOR_TYPE, + JS_MAP_ITERATOR_TYPE, JS_WEAK_MAP_TYPE, JS_WEAK_SET_TYPE, @@ -904,7 +858,6 @@ class AllocationSiteCreationContext; class AllocationSiteUsageContext; class DictionaryElementsAccessor; class ElementsAccessor; -class Failure; class FixedArrayBase; class GlobalObject; class ObjectVisitor; @@ -930,60 +883,6 @@ template <class C> inline bool Is(Object* obj); #define DECLARE_PRINTER(Name) #endif -class MaybeObject BASE_EMBEDDED { - public: - inline bool IsFailure(); - inline bool IsRetryAfterGC(); - inline bool IsException(); - INLINE(bool IsTheHole()); - INLINE(bool IsUninitialized()); - inline bool ToObject(Object** obj) { - if (IsFailure()) return false; - *obj = reinterpret_cast<Object*>(this); - return true; - } - inline Failure* ToFailureUnchecked() { - ASSERT(IsFailure()); - return reinterpret_cast<Failure*>(this); - } - inline Object* ToObjectUnchecked() { - // TODO(jkummerow): Turn this back into an ASSERT when we can be certain - // that it never fires in Release mode in the wild. - CHECK(!IsFailure()); - return reinterpret_cast<Object*>(this); - } - inline Object* ToObjectChecked() { - CHECK(!IsFailure()); - return reinterpret_cast<Object*>(this); - } - - template<typename T> - inline bool To(T** obj) { - if (IsFailure()) return false; - *obj = T::cast(reinterpret_cast<Object*>(this)); - return true; - } - - template<typename T> - inline bool ToHandle(Handle<T>* obj, Isolate* isolate) { - if (IsFailure()) return false; - *obj = handle(T::cast(reinterpret_cast<Object*>(this)), isolate); - return true; - } - -#ifdef OBJECT_PRINT - // Prints this object with details. - void Print(); - void Print(FILE* out); - void PrintLn(); - void PrintLn(FILE* out); -#endif -#ifdef VERIFY_HEAP - // Verifies the object. 
- void Verify(); -#endif -}; - #define OBJECT_TYPE_LIST(V) \ V(Smi) \ @@ -1064,6 +963,8 @@ class MaybeObject BASE_EMBEDDED { V(JSFunctionProxy) \ V(JSSet) \ V(JSMap) \ + V(JSSetIterator) \ + V(JSMapIterator) \ V(JSWeakCollection) \ V(JSWeakMap) \ V(JSWeakSet) \ @@ -1087,7 +988,8 @@ class MaybeObject BASE_EMBEDDED { V(Cell) \ V(PropertyCell) \ V(ObjectHashTable) \ - V(WeakHashTable) + V(WeakHashTable) \ + V(OrderedHashTable) #define ERROR_MESSAGES_LIST(V) \ @@ -1254,6 +1156,8 @@ class MaybeObject BASE_EMBEDDED { V(kLiveEdit, "LiveEdit") \ V(kLookupVariableInCountOperation, \ "Lookup variable in count operation") \ + V(kMapBecameDeprecated, "Map became deprecated") \ + V(kMapBecameUnstable, "Map became unstable") \ V(kMapIsNoLongerInEax, "Map is no longer in eax") \ V(kModuleDeclaration, "Module declaration") \ V(kModuleLiteral, "Module literal") \ @@ -1292,6 +1196,8 @@ class MaybeObject BASE_EMBEDDED { V(kOperandIsNotAString, "Operand is not a string") \ V(kOperandIsNotSmi, "Operand is not smi") \ V(kOperandNotANumber, "Operand not a number") \ + V(kObjectTagged, "The object is tagged") \ + V(kObjectNotTagged, "The object is not tagged") \ V(kOptimizationDisabled, "Optimization is disabled") \ V(kOptimizedTooManyTimes, "Optimized too many times") \ V(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister, \ @@ -1385,6 +1291,7 @@ class MaybeObject BASE_EMBEDDED { V(kUnexpectedNegativeValue, "Unexpected negative value") \ V(kUnexpectedNumberOfPreAllocatedPropertyFields, \ "Unexpected number of pre-allocated property fields") \ + V(kUnexpectedFPCRMode, "Unexpected FPCR mode.") \ V(kUnexpectedSmi, "Unexpected smi value") \ V(kUnexpectedStringFunction, "Unexpected String function") \ V(kUnexpectedStringType, "Unexpected string type") \ @@ -1435,9 +1342,9 @@ const char* GetBailoutReason(BailoutReason reason); // object hierarchy. // Object does not use any virtual functions to avoid the // allocation of the C++ vtable. -// Since Smi and Failure are subclasses of Object no +// Since both Smi and HeapObject are subclasses of Object no // data members can be present in Object. -class Object : public MaybeObject { +class Object { public: // Type testing. bool IsObject() { return true; } @@ -1458,17 +1365,18 @@ class Object : public MaybeObject { INLINE(bool IsSpecObject()); INLINE(bool IsSpecFunction()); + INLINE(bool IsTemplateInfo()); bool IsCallable(); // Oddball testing. INLINE(bool IsUndefined()); INLINE(bool IsNull()); - INLINE(bool IsTheHole()); // Shadows MaybeObject's implementation. + INLINE(bool IsTheHole()); + INLINE(bool IsException()); INLINE(bool IsUninitialized()); INLINE(bool IsTrue()); INLINE(bool IsFalse()); inline bool IsArgumentsMarker(); - inline bool NonFailureIsHeapObject(); // Filler objects (fillers and free space objects). inline bool IsFiller(); @@ -1517,8 +1425,11 @@ class Object : public MaybeObject { return true; } - inline MaybeObject* AllocateNewStorageFor(Heap* heap, - Representation representation); + Handle<HeapType> OptimalType(Isolate* isolate, Representation representation); + + inline static Handle<Object> NewStorageFor(Isolate* isolate, + Handle<Object> object, + Representation representation); // Returns true if the object is of the correct type to be used as a // implementation of a JSObject's elements. @@ -1526,77 +1437,63 @@ class Object : public MaybeObject { inline bool HasSpecificClassOf(String* name); - MUST_USE_RESULT MaybeObject* ToObject(Isolate* isolate); // ECMA-262 9.9. bool BooleanValue(); // ECMA-262 9.2. // Convert to a JSObject if needed. 
// native_context is used when creating wrapper object. - MUST_USE_RESULT MaybeObject* ToObject(Context* native_context); + static inline MaybeHandle<JSReceiver> ToObject(Isolate* isolate, + Handle<Object> object); + static MaybeHandle<JSReceiver> ToObject(Isolate* isolate, + Handle<Object> object, + Handle<Context> context); // Converts this to a Smi if possible. - // Failure is returned otherwise. - static MUST_USE_RESULT inline Handle<Object> ToSmi(Isolate* isolate, - Handle<Object> object); - MUST_USE_RESULT inline MaybeObject* ToSmi(); + static MUST_USE_RESULT inline MaybeHandle<Smi> ToSmi(Isolate* isolate, + Handle<Object> object); - void Lookup(Name* name, LookupResult* result); - - // Property access. - MUST_USE_RESULT inline MaybeObject* GetProperty(Name* key); - MUST_USE_RESULT inline MaybeObject* GetProperty( - Name* key, - PropertyAttributes* attributes); + void Lookup(Handle<Name> name, LookupResult* result); - // TODO(yangguo): this should eventually replace the non-handlified version. - static Handle<Object> GetPropertyWithReceiver(Handle<Object> object, - Handle<Object> receiver, - Handle<Name> name, - PropertyAttributes* attributes); - MUST_USE_RESULT MaybeObject* GetPropertyWithReceiver( - Object* receiver, - Name* key, + MUST_USE_RESULT static MaybeHandle<Object> GetPropertyWithReceiver( + Handle<Object> object, + Handle<Object> receiver, + Handle<Name> name, PropertyAttributes* attributes); - - static Handle<Object> GetProperty(Handle<Object> object, - Handle<Name> key); - static Handle<Object> GetProperty(Handle<Object> object, - Handle<Object> receiver, - LookupResult* result, - Handle<Name> key, - PropertyAttributes* attributes); - - MUST_USE_RESULT static MaybeObject* GetPropertyOrFail( + MUST_USE_RESULT static inline MaybeHandle<Object> GetPropertyOrElement( + Handle<Object> object, + Handle<Name> key); + MUST_USE_RESULT static inline MaybeHandle<Object> GetProperty( + Isolate* isolate, + Handle<Object> object, + const char* key); + MUST_USE_RESULT static inline MaybeHandle<Object> GetProperty( + Handle<Object> object, + Handle<Name> key); + MUST_USE_RESULT static MaybeHandle<Object> GetProperty( Handle<Object> object, Handle<Object> receiver, LookupResult* result, Handle<Name> key, PropertyAttributes* attributes); - MUST_USE_RESULT MaybeObject* GetProperty(Object* receiver, - LookupResult* result, - Name* key, - PropertyAttributes* attributes); - - MUST_USE_RESULT MaybeObject* GetPropertyWithDefinedGetter(Object* receiver, - JSReceiver* getter); - - static inline Handle<Object> GetElement(Isolate* isolate, - Handle<Object> object, - uint32_t index); + MUST_USE_RESULT static MaybeHandle<Object> GetPropertyWithDefinedGetter( + Handle<Object> object, + Handle<Object> receiver, + Handle<JSReceiver> getter); - // For use when we know that no exception can be thrown. - static inline Handle<Object> GetElementNoExceptionThrown( + MUST_USE_RESULT static inline MaybeHandle<Object> GetElement( Isolate* isolate, Handle<Object> object, uint32_t index); - static Handle<Object> GetElementWithReceiver(Isolate* isolate, - Handle<Object> object, - Handle<Object> receiver, - uint32_t index); + MUST_USE_RESULT static MaybeHandle<Object> GetElementWithReceiver( + Isolate* isolate, + Handle<Object> object, + Handle<Object> receiver, + uint32_t index); // Return the object's prototype (might be Heap::null_value()). 
Object* GetPrototype(Isolate* isolate); + static Handle<Object> GetPrototype(Isolate* isolate, Handle<Object> object); Map* GetMarkerMap(Isolate* isolate); // Returns the permanent hash code associated with this object. May return @@ -1624,6 +1521,7 @@ class Object : public MaybeObject { // < the length of the string. Used to implement [] on strings. inline bool IsStringObjectWithCharacterAt(uint32_t index); + DECLARE_VERIFIER(Object) #ifdef VERIFY_HEAP // Verify a pointer is a valid object pointer. static void VerifyPointer(Object* p); @@ -1643,6 +1541,14 @@ class Object : public MaybeObject { // Layout description. static const int kHeaderSize = 0; // Object does not take up any space. +#ifdef OBJECT_PRINT + // Prints this object with details. + void Print(); + void Print(FILE* out); + void PrintLn(); + void PrintLn(FILE* out); +#endif + private: DISALLOW_IMPLICIT_CONSTRUCTORS(Object); }; @@ -1685,72 +1591,6 @@ class Smi: public Object { }; -// Failure is used for reporting out of memory situations and -// propagating exceptions through the runtime system. Failure objects -// are transient and cannot occur as part of the object graph. -// -// Failures are a single word, encoded as follows: -// +-------------------------+---+--+--+ -// |.........unused..........|sss|tt|11| -// +-------------------------+---+--+--+ -// 7 6 4 32 10 -// -// -// The low two bits, 0-1, are the failure tag, 11. The next two bits, -// 2-3, are a failure type tag 'tt' with possible values: -// 00 RETRY_AFTER_GC -// 01 EXCEPTION -// 10 INTERNAL_ERROR -// 11 OUT_OF_MEMORY_EXCEPTION -// -// The next three bits, 4-6, are an allocation space tag 'sss'. The -// allocation space tag is 000 for all failure types except -// RETRY_AFTER_GC. For RETRY_AFTER_GC, the possible values are the -// allocation spaces (the encoding is found in globals.h). - -// Failure type tag info. -const int kFailureTypeTagSize = 2; -const int kFailureTypeTagMask = (1 << kFailureTypeTagSize) - 1; - -class Failure: public MaybeObject { - public: - // RuntimeStubs assumes EXCEPTION = 1 in the compiler-generated code. - enum Type { - RETRY_AFTER_GC = 0, - EXCEPTION = 1, // Returning this marker tells the real exception - // is in Isolate::pending_exception. - INTERNAL_ERROR = 2, - OUT_OF_MEMORY_EXCEPTION = 3 - }; - - inline Type type() const; - - // Returns the space that needs to be collected for RetryAfterGC failures. - inline AllocationSpace allocation_space() const; - - inline bool IsInternalError() const; - - static inline Failure* RetryAfterGC(AllocationSpace space); - static inline Failure* RetryAfterGC(); // NEW_SPACE - static inline Failure* Exception(); - static inline Failure* InternalError(); - // Casting. - static inline Failure* cast(MaybeObject* object); - - // Dispatched behavior. - void FailurePrint(FILE* out = stdout); - void FailurePrint(StringStream* accumulator); - - DECLARE_VERIFIER(Failure) - - private: - inline intptr_t value() const; - static inline Failure* Construct(Type type, intptr_t value = 0); - - DISALLOW_IMPLICIT_CONSTRUCTORS(Failure); -}; - - // Heap objects typically have a map pointer in their first word. However, // during GC other data (e.g. mark bits, forwarding addresses) is sometimes // encoded in the first word. The class MapWord is an abstraction of the @@ -1811,6 +1651,15 @@ class HeapObject: public Object { // of primitive (non-JS) objects like strings, heap numbers etc. inline void set_map_no_write_barrier(Map* value); + // Get the map using acquire load. 
+ inline Map* synchronized_map(); + inline MapWord synchronized_map_word(); + + // Set the map using release store + inline void synchronized_set_map(Map* value); + inline void synchronized_set_map_no_write_barrier(Map* value); + inline void synchronized_set_map_word(MapWord map_word); + // During garbage collection, the map word of a heap object does not // necessarily contain a map pointer. inline MapWord map_word(); @@ -1970,11 +1819,18 @@ class HeapNumber: public HeapObject { // Layout description. static const int kValueOffset = HeapObject::kHeaderSize; // IEEE doubles are two 32 bit words. The first is just mantissa, the second - // is a mixture of sign, exponent and mantissa. Our current platforms are all - // little endian apart from non-EABI arm which is little endian with big - // endian floating point word ordering! + // is a mixture of sign, exponent and mantissa. The offsets of two 32 bit + // words within double numbers are endian dependent and they are set + // accordingly. +#if defined(V8_TARGET_LITTLE_ENDIAN) static const int kMantissaOffset = kValueOffset; static const int kExponentOffset = kValueOffset + 4; +#elif defined(V8_TARGET_BIG_ENDIAN) + static const int kMantissaOffset = kValueOffset + 4; + static const int kExponentOffset = kValueOffset; +#else +#error Unknown byte ordering +#endif static const int kSize = kValueOffset + kDoubleSize; static const uint32_t kSignMask = 0x80000000u; @@ -2046,18 +1902,19 @@ class JSReceiver: public HeapObject { static inline JSReceiver* cast(Object* obj); // Implementation of [[Put]], ECMA-262 5th edition, section 8.12.5. - static Handle<Object> SetProperty(Handle<JSReceiver> object, - Handle<Name> key, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode, - StoreFromKeyed store_mode = - MAY_BE_STORE_FROM_KEYED); - static Handle<Object> SetElement(Handle<JSReceiver> object, - uint32_t index, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode); + MUST_USE_RESULT static MaybeHandle<Object> SetProperty( + Handle<JSReceiver> object, + Handle<Name> key, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode, + StoreFromKeyed store_mode = MAY_BE_STORE_FROM_KEYED); + MUST_USE_RESULT static MaybeHandle<Object> SetElement( + Handle<JSReceiver> object, + uint32_t index, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode); // Implementation of [[HasProperty]], ECMA-262 5th edition, section 8.12.6. static inline bool HasProperty(Handle<JSReceiver> object, Handle<Name> name); @@ -2066,12 +1923,14 @@ class JSReceiver: public HeapObject { static inline bool HasLocalElement(Handle<JSReceiver> object, uint32_t index); // Implementation of [[Delete]], ECMA-262 5th edition, section 8.12.7. - static Handle<Object> DeleteProperty(Handle<JSReceiver> object, - Handle<Name> name, - DeleteMode mode = NORMAL_DELETION); - static Handle<Object> DeleteElement(Handle<JSReceiver> object, - uint32_t index, - DeleteMode mode = NORMAL_DELETION); + MUST_USE_RESULT static MaybeHandle<Object> DeleteProperty( + Handle<JSReceiver> object, + Handle<Name> name, + DeleteMode mode = NORMAL_DELETION); + MUST_USE_RESULT static MaybeHandle<Object> DeleteElement( + Handle<JSReceiver> object, + uint32_t index, + DeleteMode mode = NORMAL_DELETION); // Tests for the fast common case for property enumeration. bool IsSimpleEnum(); @@ -2118,16 +1977,23 @@ class JSReceiver: public HeapObject { // Lookup a property. 
If found, the result is valid and has // detailed information. - void LocalLookup(Name* name, LookupResult* result, + void LocalLookup(Handle<Name> name, LookupResult* result, bool search_hidden_prototypes = false); - void Lookup(Name* name, LookupResult* result); + void Lookup(Handle<Name> name, LookupResult* result); - protected: - Smi* GenerateIdentityHash(); + enum KeyCollectionType { LOCAL_ONLY, INCLUDE_PROTOS }; - static Handle<Object> SetPropertyWithDefinedSetter(Handle<JSReceiver> object, - Handle<JSReceiver> setter, - Handle<Object> value); + // Computes the enumerable keys for a JSObject. Used for implementing + // "for (n in object) { }". + MUST_USE_RESULT static MaybeHandle<FixedArray> GetKeys( + Handle<JSReceiver> object, + KeyCollectionType type); + + protected: + MUST_USE_RESULT static MaybeHandle<Object> SetPropertyWithDefinedSetter( + Handle<JSReceiver> object, + Handle<JSReceiver> setter, + Handle<Object> value); private: static PropertyAttributes GetPropertyAttributeForResult( @@ -2137,13 +2003,14 @@ class JSReceiver: public HeapObject { Handle<Name> name, bool continue_search); - static Handle<Object> SetProperty(Handle<JSReceiver> receiver, - LookupResult* result, - Handle<Name> key, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode, - StoreFromKeyed store_from_keyed); + MUST_USE_RESULT static MaybeHandle<Object> SetProperty( + Handle<JSReceiver> receiver, + LookupResult* result, + Handle<Name> key, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode, + StoreFromKeyed store_from_keyed); DISALLOW_IMPLICIT_CONSTRUCTORS(JSReceiver); }; @@ -2151,6 +2018,10 @@ class JSReceiver: public HeapObject { // Forward declaration for JSObject::GetOrCreateHiddenPropertiesHashTable. class ObjectHashTable; +// Forward declaration for JSObject::Copy. +class AllocationSite; + + // The JSObject describes real heap allocated JavaScript objects with // properties. // Note that the map of JSObject changes during execution to enable inline @@ -2185,7 +2056,10 @@ class JSObject: public JSReceiver { // arguments object. DECL_ACCESSORS(elements, FixedArrayBase) inline void initialize_elements(); - MUST_USE_RESULT inline MaybeObject* ResetElements(); + static void ResetElements(Handle<JSObject> object); + static inline void SetMapAndElements(Handle<JSObject> object, + Handle<Map> map, + Handle<FixedArrayBase> elements); inline ElementsKind GetElementsKind(); inline ElementsAccessor* GetElementsAccessor(); // Returns true if an object has elements of FAST_SMI_ELEMENTS ElementsKind. @@ -2234,15 +2108,9 @@ class JSObject: public JSReceiver { bool HasDictionaryArgumentsElements(); inline SeededNumberDictionary* element_dictionary(); // Gets slow elements. - inline void set_map_and_elements( - Map* map, - FixedArrayBase* value, - WriteBarrierMode mode = UPDATE_WRITE_BARRIER); - // Requires: HasFastElements(). static Handle<FixedArray> EnsureWritableFastElements( Handle<JSObject> object); - MUST_USE_RESULT inline MaybeObject* EnsureWritableFastElements(); // Collects elements starting at index 0. // Undefined values are placed after non-undefined values. @@ -2250,17 +2118,18 @@ class JSObject: public JSReceiver { static Handle<Object> PrepareElementsForSort(Handle<JSObject> object, uint32_t limit); // As PrepareElementsForSort, but only on objects where elements is - // a dictionary, and it will stay a dictionary. + // a dictionary, and it will stay a dictionary. 
Collates undefined and + // unexisting elements below limit from position zero of the elements. static Handle<Object> PrepareSlowElementsForSort(Handle<JSObject> object, uint32_t limit); - MUST_USE_RESULT MaybeObject* PrepareSlowElementsForSort(uint32_t limit); - static Handle<Object> GetPropertyWithCallback(Handle<JSObject> object, - Handle<Object> receiver, - Handle<Object> structure, - Handle<Name> name); + MUST_USE_RESULT static MaybeHandle<Object> GetPropertyWithCallback( + Handle<JSObject> object, + Handle<Object> receiver, + Handle<Object> structure, + Handle<Name> name); - static Handle<Object> SetPropertyWithCallback( + MUST_USE_RESULT static MaybeHandle<Object> SetPropertyWithCallback( Handle<JSObject> object, Handle<Object> structure, Handle<Name> name, @@ -2268,14 +2137,14 @@ class JSObject: public JSReceiver { Handle<JSObject> holder, StrictMode strict_mode); - static Handle<Object> SetPropertyWithInterceptor( + MUST_USE_RESULT static MaybeHandle<Object> SetPropertyWithInterceptor( Handle<JSObject> object, Handle<Name> name, Handle<Object> value, PropertyAttributes attributes, StrictMode strict_mode); - static Handle<Object> SetPropertyForResult( + MUST_USE_RESULT static MaybeHandle<Object> SetPropertyForResult( Handle<JSObject> object, LookupResult* result, Handle<Name> name, @@ -2284,14 +2153,15 @@ class JSObject: public JSReceiver { StrictMode strict_mode, StoreFromKeyed store_mode = MAY_BE_STORE_FROM_KEYED); - static Handle<Object> SetLocalPropertyIgnoreAttributes( + MUST_USE_RESULT static MaybeHandle<Object> SetLocalPropertyIgnoreAttributes( Handle<JSObject> object, Handle<Name> key, Handle<Object> value, PropertyAttributes attributes, ValueType value_type = OPTIMAL_REPRESENTATION, StoreMode mode = ALLOW_AS_CONSTANT, - ExtensibilityCheck extensibility_check = PERFORM_EXTENSIBILITY_CHECK); + ExtensibilityCheck extensibility_check = PERFORM_EXTENSIBILITY_CHECK, + StoreFromKeyed store_mode = MAY_BE_STORE_FROM_KEYED); static inline Handle<String> ExpectedTransitionKey(Handle<Map> map); static inline Handle<Map> ExpectedTransitionTarget(Handle<Map> map); @@ -2310,12 +2180,14 @@ class JSObject: public JSReceiver { static void MigrateInstance(Handle<JSObject> instance); // Migrates the given object only if the target map is already available, - // or returns an empty handle if such a map is not yet available. - static Handle<Object> TryMigrateInstance(Handle<JSObject> instance); + // or returns false if such a map is not yet available. + static bool TryMigrateInstance(Handle<JSObject> instance); // Retrieve a value in a normalized object given a lookup result. // Handles the special representation of JS global objects. Object* GetNormalizedProperty(const LookupResult* result); + static Handle<Object> GetNormalizedProperty(Handle<JSObject> object, + const LookupResult* result); // Sets the property value in a normalized object given a lookup result. // Handles the special representation of JS global objects. @@ -2360,9 +2232,10 @@ class JSObject: public JSReceiver { // Retrieves an AccessorPair property from the given object. Might return // undefined if the property doesn't exist or is of a different kind. - static Handle<Object> GetAccessor(Handle<JSObject> object, - Handle<Name> name, - AccessorComponent component); + MUST_USE_RESULT static MaybeHandle<Object> GetAccessor( + Handle<JSObject> object, + Handle<Name> name, + AccessorComponent component); // Defines an AccessorPair property on the given object. 
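A recurring change in this hunk is the move from returning an empty Handle<Object> on failure to returning MUST_USE_RESULT MaybeHandle<Object>, so callers have to check before unwrapping. A standalone model of that calling convention; the wrapper and its To/ToChecked methods are illustrative stand-ins, not the real V8 MaybeHandle API:

// Minimal standalone model of the Handle -> MaybeHandle migration pattern
// visible in this hunk. Illustrative types only; not the real V8 API.
#include <cassert>
#include <cstdio>
#include <string>

template <typename T>
class MaybeValue {           // stands in for MaybeHandle<T>
 public:
  MaybeValue() : has_value_(false), value_() {}
  explicit MaybeValue(const T& v) : has_value_(true), value_(v) {}
  // Caller must check for failure before using the value.
  bool To(T* out) const { if (has_value_) *out = value_; return has_value_; }
  T ToChecked() const { assert(has_value_); return value_; }
 private:
  bool has_value_;
  T value_;
};

// A property store whose setter can fail (e.g. a strict-mode rejection).
MaybeValue<std::string> SetProperty(bool strict, const std::string& value) {
  if (strict && value.empty()) return MaybeValue<std::string>();  // failure
  return MaybeValue<std::string>(value);
}

int main() {
  std::string result;
  if (!SetProperty(true, "").To(&result)) {
    std::printf("set failed; caller must propagate the pending exception\n");
  }
  result = SetProperty(false, "ok").ToChecked();
  std::printf("set succeeded: %s\n", result.c_str());
  return 0;
}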
// TODO(mstarzinger): Rename to SetAccessor() and return empty handle on @@ -2375,23 +2248,20 @@ class JSObject: public JSReceiver { v8::AccessControl access_control = v8::DEFAULT); // Defines an AccessorInfo property on the given object. - static Handle<Object> SetAccessor(Handle<JSObject> object, - Handle<AccessorInfo> info); + MUST_USE_RESULT static MaybeHandle<Object> SetAccessor( + Handle<JSObject> object, + Handle<AccessorInfo> info); - static Handle<Object> GetPropertyWithInterceptor( + MUST_USE_RESULT static MaybeHandle<Object> GetPropertyWithInterceptor( Handle<JSObject> object, Handle<Object> receiver, Handle<Name> name, PropertyAttributes* attributes); - static Handle<Object> GetPropertyPostInterceptor( + MUST_USE_RESULT static MaybeHandle<Object> GetPropertyPostInterceptor( Handle<JSObject> object, Handle<Object> receiver, Handle<Name> name, PropertyAttributes* attributes); - MUST_USE_RESULT MaybeObject* GetLocalPropertyPostInterceptor( - Object* receiver, - Name* name, - PropertyAttributes* attributes); // Returns true if this is an instance of an api function and has // been modified since it was created. May give false positives. @@ -2414,7 +2284,7 @@ class JSObject: public JSReceiver { // Gets the value of a hidden property with the given key. Returns the hole // if the property doesn't exist (or if called on a detached proxy), // otherwise returns the value set for the key. - Object* GetHiddenProperty(Name* key); + Object* GetHiddenProperty(Handle<Name> key); // Deletes a hidden property. Deleting a non-existing property is // considered successful. static void DeleteHiddenProperty(Handle<JSObject> object, @@ -2424,7 +2294,7 @@ class JSObject: public JSReceiver { static void SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash); - inline void ValidateElements(); + static inline void ValidateElements(Handle<JSObject> object); // Makes sure that this object can contain HeapObject as elements. static inline void EnsureCanContainHeapObjectElements(Handle<JSObject> obj); @@ -2470,21 +2340,28 @@ class JSObject: public JSReceiver { } // These methods do not perform access checks! - AccessorPair* GetLocalPropertyAccessorPair(Name* name); - AccessorPair* GetLocalElementAccessorPair(uint32_t index); + MUST_USE_RESULT static MaybeHandle<AccessorPair> GetLocalPropertyAccessorPair( + Handle<JSObject> object, + Handle<Name> name); + MUST_USE_RESULT static MaybeHandle<AccessorPair> GetLocalElementAccessorPair( + Handle<JSObject> object, + uint32_t index); - static Handle<Object> SetFastElement(Handle<JSObject> object, uint32_t index, - Handle<Object> value, - StrictMode strict_mode, - bool check_prototype); + MUST_USE_RESULT static MaybeHandle<Object> SetFastElement( + Handle<JSObject> object, + uint32_t index, + Handle<Object> value, + StrictMode strict_mode, + bool check_prototype); - static Handle<Object> SetOwnElement(Handle<JSObject> object, - uint32_t index, - Handle<Object> value, - StrictMode strict_mode); + MUST_USE_RESULT static MaybeHandle<Object> SetOwnElement( + Handle<JSObject> object, + uint32_t index, + Handle<Object> value, + StrictMode strict_mode); // Empty handle is returned if the element cannot be set to the given value. - static Handle<Object> SetElement( + MUST_USE_RESULT static MaybeHandle<Object> SetElement( Handle<JSObject> object, uint32_t index, Handle<Object> value, @@ -2495,9 +2372,10 @@ class JSObject: public JSReceiver { // Returns the index'th element. // The undefined object if index is out of bounds. 
- static Handle<Object> GetElementWithInterceptor(Handle<JSObject> object, - Handle<Object> receiver, - uint32_t index); + MUST_USE_RESULT static MaybeHandle<Object> GetElementWithInterceptor( + Handle<JSObject> object, + Handle<Object> receiver, + uint32_t index); enum SetFastElementsCapacitySmiMode { kAllowSmiElements, @@ -2517,15 +2395,21 @@ class JSObject: public JSReceiver { Handle<JSObject> object, int capacity, int length); - MUST_USE_RESULT MaybeObject* SetFastDoubleElementsCapacityAndLength( - int capacity, - int length); // Lookup interceptors are used for handling properties controlled by host // objects. inline bool HasNamedInterceptor(); inline bool HasIndexedInterceptor(); + // Computes the enumerable keys from interceptors. Used for debug mirrors and + // by JSReceiver::GetKeys. + MUST_USE_RESULT static MaybeHandle<JSObject> GetKeysForNamedInterceptor( + Handle<JSObject> object, + Handle<JSReceiver> receiver); + MUST_USE_RESULT static MaybeHandle<JSObject> GetKeysForIndexedInterceptor( + Handle<JSObject> object, + Handle<JSReceiver> receiver); + // Support functions for v8 api (needed for correct interceptor behavior). static bool HasRealNamedProperty(Handle<JSObject> object, Handle<Name> key); @@ -2544,10 +2428,11 @@ class JSObject: public JSReceiver { inline void SetInternalField(int index, Smi* value); // The following lookup functions skip interceptors. - void LocalLookupRealNamedProperty(Name* name, LookupResult* result); - void LookupRealNamedProperty(Name* name, LookupResult* result); - void LookupRealNamedPropertyInPrototypes(Name* name, LookupResult* result); - void LookupCallbackProperty(Name* name, LookupResult* result); + void LocalLookupRealNamedProperty(Handle<Name> name, LookupResult* result); + void LookupRealNamedProperty(Handle<Name> name, LookupResult* result); + void LookupRealNamedPropertyInPrototypes(Handle<Name> name, + LookupResult* result); + void LookupCallbackProperty(Handle<Name> name, LookupResult* result); // Returns the number of properties on this object filtering out properties // with the specified attributes (ignoring interceptors). @@ -2576,12 +2461,6 @@ class JSObject: public JSReceiver { // map and the ElementsKind set. static Handle<Map> GetElementsTransitionMap(Handle<JSObject> object, ElementsKind to_kind); - inline MUST_USE_RESULT MaybeObject* GetElementsTransitionMap( - Isolate* isolate, - ElementsKind elements_kind); - MUST_USE_RESULT MaybeObject* GetElementsTransitionMapSlow( - ElementsKind elements_kind); - static void TransitionElementsKind(Handle<JSObject> object, ElementsKind to_kind); @@ -2590,6 +2469,7 @@ class JSObject: public JSReceiver { static void GeneralizeFieldRepresentation(Handle<JSObject> object, int modify_index, Representation new_representation, + Handle<HeapType> new_field_type, StoreMode store_mode); // Convert the object to use the canonical dictionary @@ -2610,11 +2490,12 @@ class JSObject: public JSReceiver { int unused_property_fields); // Access fast-case object properties at index. - MUST_USE_RESULT inline MaybeObject* FastPropertyAt( - Representation representation, - int index); + static Handle<Object> FastPropertyAt(Handle<JSObject> object, + Representation representation, + int index); inline Object* RawFastPropertyAt(int index); inline void FastPropertyAtPut(int index, Object* value); + void WriteToField(int descriptor, Object* value); // Access to in object properties. 
inline int GetInObjectPropertyOffset(int index); @@ -2625,9 +2506,10 @@ class JSObject: public JSReceiver { = UPDATE_WRITE_BARRIER); // Set the object's prototype (only JSReceiver and null are allowed values). - static Handle<Object> SetPrototype(Handle<JSObject> object, - Handle<Object> value, - bool skip_hidden_prototypes = false); + MUST_USE_RESULT static MaybeHandle<Object> SetPrototype( + Handle<JSObject> object, + Handle<Object> value, + bool skip_hidden_prototypes = false); // Initializes the body after properties slot, properties slot is // initialized by set_properties. Fill the pre-allocated fields with @@ -2642,10 +2524,11 @@ class JSObject: public JSReceiver { bool ReferencesObject(Object* obj); // Disalow further properties to be added to the object. - static Handle<Object> PreventExtensions(Handle<JSObject> object); + MUST_USE_RESULT static MaybeHandle<Object> PreventExtensions( + Handle<JSObject> object); // ES5 Object.freeze - static Handle<Object> Freeze(Handle<JSObject> object); + MUST_USE_RESULT static MaybeHandle<Object> Freeze(Handle<JSObject> object); // Called the first time an object is observed with ES7 Object.observe. static void SetObserved(Handle<JSObject> object); @@ -2657,11 +2540,16 @@ class JSObject: public JSReceiver { }; static Handle<JSObject> Copy(Handle<JSObject> object); - static Handle<JSObject> DeepCopy(Handle<JSObject> object, - AllocationSiteUsageContext* site_context, - DeepCopyHints hints = kNoHints); - static Handle<JSObject> DeepWalk(Handle<JSObject> object, - AllocationSiteCreationContext* site_context); + MUST_USE_RESULT static MaybeHandle<JSObject> DeepCopy( + Handle<JSObject> object, + AllocationSiteUsageContext* site_context, + DeepCopyHints hints = kNoHints); + MUST_USE_RESULT static MaybeHandle<JSObject> DeepWalk( + Handle<JSObject> object, + AllocationSiteCreationContext* site_context); + + static Handle<Object> GetDataProperty(Handle<JSObject> object, + Handle<Name> key); // Casting. static inline JSObject* cast(Object* obj); @@ -2744,7 +2632,7 @@ class JSObject: public JSReceiver { static const int kInitialMaxFastElementArray = 100000; static const int kFastPropertiesSoftLimit = 12; - static const int kMaxFastProperties = 64; + static const int kMaxFastProperties = 128; static const int kMaxInstanceSize = 255 * kPointerSize; // When extending the backing storage for property values, we increase // its size by more than the 1 entry necessary, so sequentially adding fields @@ -2763,6 +2651,8 @@ class JSObject: public JSReceiver { static inline int SizeOf(Map* map, HeapObject* object); }; + Context* GetCreationContext(); + // Enqueue change record for Object.observe. May cause GC. static void EnqueueChangeRecord(Handle<JSObject> object, const char* type, @@ -2778,17 +2668,20 @@ class JSObject: public JSReceiver { ElementsKind to_kind); // Used from Object::GetProperty(). 
- static Handle<Object> GetPropertyWithFailedAccessCheck( + MUST_USE_RESULT static MaybeHandle<Object> GetPropertyWithFailedAccessCheck( Handle<JSObject> object, Handle<Object> receiver, LookupResult* result, Handle<Name> name, PropertyAttributes* attributes); - MUST_USE_RESULT MaybeObject* GetElementWithCallback(Object* receiver, - Object* structure, - uint32_t index, - Object* holder); + MUST_USE_RESULT static MaybeHandle<Object> GetElementWithCallback( + Handle<JSObject> object, + Handle<Object> receiver, + Handle<Object> structure, + uint32_t index, + Handle<Object> holder); + static PropertyAttributes GetElementAttributeWithInterceptor( Handle<JSObject> object, Handle<JSReceiver> receiver, @@ -2799,14 +2692,14 @@ class JSObject: public JSReceiver { Handle<JSReceiver> receiver, uint32_t index, bool continue_search); - static Handle<Object> SetElementWithCallback( + MUST_USE_RESULT static MaybeHandle<Object> SetElementWithCallback( Handle<JSObject> object, Handle<Object> structure, uint32_t index, Handle<Object> value, Handle<JSObject> holder, StrictMode strict_mode); - static Handle<Object> SetElementWithInterceptor( + MUST_USE_RESULT static MaybeHandle<Object> SetElementWithInterceptor( Handle<JSObject> object, uint32_t index, Handle<Object> value, @@ -2814,7 +2707,7 @@ class JSObject: public JSReceiver { StrictMode strict_mode, bool check_prototype, SetPropertyMode set_mode); - static Handle<Object> SetElementWithoutInterceptor( + MUST_USE_RESULT static MaybeHandle<Object> SetElementWithoutInterceptor( Handle<JSObject> object, uint32_t index, Handle<Object> value, @@ -2822,13 +2715,14 @@ class JSObject: public JSReceiver { StrictMode strict_mode, bool check_prototype, SetPropertyMode set_mode); - static Handle<Object> SetElementWithCallbackSetterInPrototypes( + MUST_USE_RESULT + static MaybeHandle<Object> SetElementWithCallbackSetterInPrototypes( Handle<JSObject> object, uint32_t index, Handle<Object> value, bool* found, StrictMode strict_mode); - static Handle<Object> SetDictionaryElement( + MUST_USE_RESULT static MaybeHandle<Object> SetDictionaryElement( Handle<JSObject> object, uint32_t index, Handle<Object> value, @@ -2836,7 +2730,7 @@ class JSObject: public JSReceiver { StrictMode strict_mode, bool check_prototype, SetPropertyMode set_mode = SET_PROPERTY); - static Handle<Object> SetFastDoubleElement( + MUST_USE_RESULT static MaybeHandle<Object> SetFastDoubleElement( Handle<JSObject> object, uint32_t index, Handle<Object> value, @@ -2847,26 +2741,26 @@ class JSObject: public JSReceiver { // has a setter, invoke it and set '*done' to true. If it is found and is // read-only, reject and set '*done' to true. Otherwise, set '*done' to // false. Can throw and return an empty handle with '*done==true'. 
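The comment above spells out the prototype-walk protocol: an inherited setter is invoked and '*done' becomes true, a read-only property on the chain rejects and also sets '*done' to true, and otherwise '*done' stays false so the caller defines the property on the receiver itself. A standalone sketch of that control flow with simplified, illustrative types:

// Standalone sketch of the "*done" protocol described above: walk the
// prototype chain; a found setter or a read-only slot terminates the walk
// with *done == true, otherwise *done is left false for the caller.
// Simplified, illustrative types; not the V8 implementation.
#include <cstdio>
#include <functional>
#include <map>
#include <string>

struct Proto {
  std::map<std::string, bool> read_only;                     // name -> read-only?
  std::map<std::string, std::function<void(int)>> setters;   // accessor properties
  Proto* next = nullptr;                                     // prototype link
};

bool SetViaPrototypes(Proto* start, const std::string& name, int value,
                      bool strict, bool* done) {
  *done = false;
  for (Proto* p = start; p != nullptr; p = p->next) {
    auto setter = p->setters.find(name);
    if (setter != p->setters.end()) {
      setter->second(value);    // invoke the inherited setter
      *done = true;
      return true;
    }
    auto ro = p->read_only.find(name);
    if (ro != p->read_only.end() && ro->second) {
      *done = true;             // read-only on the chain: reject
      return !strict;           // a strict-mode caller would throw here
    }
  }
  return true;                  // not handled; caller adds an own property
}

int main() {
  Proto base;
  base.setters["x"] = [](int v) { std::printf("setter got %d\n", v); };
  Proto obj;
  obj.next = &base;
  bool done = false;
  SetViaPrototypes(&obj, "x", 42, /*strict=*/false, &done);
  std::printf("done=%d\n", done);
  return 0;
}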
- static Handle<Object> SetPropertyViaPrototypes( + MUST_USE_RESULT static MaybeHandle<Object> SetPropertyViaPrototypes( Handle<JSObject> object, Handle<Name> name, Handle<Object> value, PropertyAttributes attributes, StrictMode strict_mode, bool* done); - static Handle<Object> SetPropertyPostInterceptor( + MUST_USE_RESULT static MaybeHandle<Object> SetPropertyPostInterceptor( Handle<JSObject> object, Handle<Name> name, Handle<Object> value, PropertyAttributes attributes, StrictMode strict_mode); - static Handle<Object> SetPropertyUsingTransition( + MUST_USE_RESULT static MaybeHandle<Object> SetPropertyUsingTransition( Handle<JSObject> object, LookupResult* lookup, Handle<Name> name, Handle<Object> value, PropertyAttributes attributes); - static Handle<Object> SetPropertyWithFailedAccessCheck( + MUST_USE_RESULT static MaybeHandle<Object> SetPropertyWithFailedAccessCheck( Handle<JSObject> object, LookupResult* result, Handle<Name> name, @@ -2875,7 +2769,7 @@ class JSObject: public JSReceiver { StrictMode strict_mode); // Add a property to an object. - static Handle<Object> AddProperty( + MUST_USE_RESULT static MaybeHandle<Object> AddProperty( Handle<JSObject> object, Handle<Name> name, Handle<Object> value, @@ -2887,18 +2781,6 @@ class JSObject: public JSReceiver { StoreMode mode = ALLOW_AS_CONSTANT, TransitionFlag flag = INSERT_TRANSITION); - // Add a constant function property to a fast-case object. - // This leaves a CONSTANT_TRANSITION in the old map, and - // if it is called on a second object with this map, a - // normal property is added instead, with a map transition. - // This avoids the creation of many maps with the same constant - // function, all orphaned. - static void AddConstantProperty(Handle<JSObject> object, - Handle<Name> name, - Handle<Object> constant, - PropertyAttributes attributes, - TransitionFlag flag); - // Add a property to a fast-case object. static void AddFastProperty(Handle<JSObject> object, Handle<Name> name, @@ -2908,31 +2790,39 @@ class JSObject: public JSReceiver { ValueType value_type, TransitionFlag flag); + static void MigrateToNewProperty(Handle<JSObject> object, + Handle<Map> transition, + Handle<Object> value); + // Add a property to a slow-case object. static void AddSlowProperty(Handle<JSObject> object, Handle<Name> name, Handle<Object> value, PropertyAttributes attributes); - static Handle<Object> DeleteProperty(Handle<JSObject> object, - Handle<Name> name, - DeleteMode mode); + MUST_USE_RESULT static MaybeHandle<Object> DeleteProperty( + Handle<JSObject> object, + Handle<Name> name, + DeleteMode mode); static Handle<Object> DeletePropertyPostInterceptor(Handle<JSObject> object, Handle<Name> name, DeleteMode mode); - static Handle<Object> DeletePropertyWithInterceptor(Handle<JSObject> object, - Handle<Name> name); + MUST_USE_RESULT static MaybeHandle<Object> DeletePropertyWithInterceptor( + Handle<JSObject> object, + Handle<Name> name); // Deletes the named property in a normalized object. 
static Handle<Object> DeleteNormalizedProperty(Handle<JSObject> object, Handle<Name> name, DeleteMode mode); - static Handle<Object> DeleteElement(Handle<JSObject> object, - uint32_t index, - DeleteMode mode); - static Handle<Object> DeleteElementWithInterceptor(Handle<JSObject> object, - uint32_t index); + MUST_USE_RESULT static MaybeHandle<Object> DeleteElement( + Handle<JSObject> object, + uint32_t index, + DeleteMode mode); + MUST_USE_RESULT static MaybeHandle<Object> DeleteElementWithInterceptor( + Handle<JSObject> object, + uint32_t index); bool ReferencesObjectFromElements(FixedArray* elements, ElementsKind kind, @@ -3008,6 +2898,10 @@ class FixedArrayBase: public HeapObject { inline int length(); inline void set_length(int value); + // Get and set the length using acquire loads and release stores. + inline int synchronized_length(); + inline void synchronized_set_length(int value); + inline static FixedArrayBase* cast(Object* object); // Layout description. @@ -3026,6 +2920,7 @@ class FixedArray: public FixedArrayBase { public: // Setter and getter for elements. inline Object* get(int index); + static inline Handle<Object> get(Handle<FixedArray> array, int index); // Setter that uses write barrier. inline void set(int index, Object* value); inline bool is_the_hole(int index); @@ -3046,19 +2941,26 @@ class FixedArray: public FixedArrayBase { // Gives access to raw memory which stores the array's data. inline Object** data_start(); + inline void FillWithHoles(int from, int to); + // Shrink length and insert filler objects. void Shrink(int length); - // Copy operations. - MUST_USE_RESULT inline MaybeObject* Copy(); - MUST_USE_RESULT MaybeObject* CopySize(int new_length, - PretenureFlag pretenure = NOT_TENURED); + // Copy operation. + static Handle<FixedArray> CopySize(Handle<FixedArray> array, + int new_length, + PretenureFlag pretenure = NOT_TENURED); // Add the elements of a JSArray to this FixedArray. - MUST_USE_RESULT MaybeObject* AddKeysFromJSArray(JSArray* array); + MUST_USE_RESULT static MaybeHandle<FixedArray> AddKeysFromArrayLike( + Handle<FixedArray> content, + Handle<JSObject> array); - // Compute the union of this and other. - MUST_USE_RESULT MaybeObject* UnionOfKeys(FixedArray* other); + // Computes the union of keys and return the result. + // Used for implementing "for (n in object) { }" + MUST_USE_RESULT static MaybeHandle<FixedArray> UnionOfKeys( + Handle<FixedArray> first, + Handle<FixedArray> second); // Copy a sub array from the receiver to dest. void CopyTo(int pos, FixedArray* dest, int dest_pos, int len); @@ -3136,18 +3038,13 @@ class FixedDoubleArray: public FixedArrayBase { // Setter and getter for elements. inline double get_scalar(int index); inline int64_t get_representation(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); - // TODO(ishell): Rename as get() once all usages handlified. - inline Handle<Object> get_as_handle(int index); + static inline Handle<Object> get(Handle<FixedDoubleArray> array, int index); inline void set(int index, double value); inline void set_the_hole(int index); // Checking for the hole. inline bool is_the_hole(int index); - // Copy operations - MUST_USE_RESULT inline MaybeObject* Copy(); - // Garbage collection support. inline static int SizeFor(int length) { return kHeaderSize + length * kDoubleSize; @@ -3156,6 +3053,8 @@ class FixedDoubleArray: public FixedArrayBase { // Gives access to raw memory which stores the array's data. 
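UnionOfKeys above is documented as computing the union of two key arrays for "for (n in object)". A standalone sketch of such a union that keeps the first array's order and appends only keys from the second that are not already present (illustrative; not the V8 algorithm):

// Standalone sketch of a key union as used for "for (n in object)":
// keep the order of |first|, then append keys from |second| that are
// not already present. Illustrative only.
#include <cstdio>
#include <string>
#include <unordered_set>
#include <vector>

std::vector<std::string> UnionOfKeys(const std::vector<std::string>& first,
                                     const std::vector<std::string>& second) {
  std::vector<std::string> result = first;
  std::unordered_set<std::string> seen(first.begin(), first.end());
  for (const std::string& key : second) {
    if (seen.insert(key).second) result.push_back(key);
  }
  return result;
}

int main() {
  std::vector<std::string> own = {"a", "b"};
  std::vector<std::string> proto = {"b", "c"};
  for (const std::string& k : UnionOfKeys(own, proto)) std::printf("%s ", k.c_str());
  std::printf("\n");  // prints: a b c
  return 0;
}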
inline double* data_start(); + inline void FillWithHoles(int from, int to); + // Code Generation support. static int OffsetOfElementAt(int index) { return SizeFor(index); } @@ -3194,6 +3093,12 @@ class FixedDoubleArray: public FixedArrayBase { // [first_int32_index()] ... [length - 1] : 32 bit entries class ConstantPoolArray: public FixedArrayBase { public: + enum WeakObjectState { + NO_WEAK_OBJECTS, + WEAK_OBJECTS_IN_OPTIMIZED_CODE, + WEAK_OBJECTS_IN_IC + }; + // Getters for the field storing the first index for different type entries. inline int first_code_ptr_index(); inline int first_heap_ptr_index(); @@ -3213,6 +3118,10 @@ class ConstantPoolArray: public FixedArrayBase { inline int32_t get_int32_entry(int index); inline double get_int64_entry_as_double(int index); + // Setter and getter for weak objects state + inline void set_weak_object_state(WeakObjectState state); + inline WeakObjectState get_weak_object_state(); + inline void set(int index, Address value); inline void set(int index, Object* value); inline void set(int index, int64_t value); @@ -3220,13 +3129,10 @@ class ConstantPoolArray: public FixedArrayBase { inline void set(int index, int32_t value); // Set up initial state. - inline void SetEntryCounts(int number_of_int64_entries, - int number_of_code_ptr_entries, - int number_of_heap_ptr_entries, - int number_of_int32_entries); - - // Copy operations - MUST_USE_RESULT inline MaybeObject* Copy(); + inline void Init(int number_of_int64_entries, + int number_of_code_ptr_entries, + int number_of_heap_ptr_entries, + int number_of_int32_entries); // Garbage collection support. inline static int SizeFor(int number_of_int64_entries, @@ -3266,12 +3172,16 @@ class ConstantPoolArray: public FixedArrayBase { } // Layout description. - static const int kFirstCodePointerIndexOffset = FixedArray::kHeaderSize; - static const int kFirstHeapPointerIndexOffset = - kFirstCodePointerIndexOffset + kPointerSize; - static const int kFirstInt32IndexOffset = - kFirstHeapPointerIndexOffset + kPointerSize; - static const int kFirstOffset = kFirstInt32IndexOffset + kPointerSize; + static const int kArrayLayoutOffset = FixedArray::kHeaderSize; + static const int kFirstOffset = kArrayLayoutOffset + kPointerSize; + + static const int kFieldBitSize = 10; + static const int kMaxEntriesPerType = (1 << kFieldBitSize) - 1; + + class NumberOfInt64EntriesField: public BitField<int, 0, kFieldBitSize> {}; + class NumberOfCodePtrEntriesField: public BitField<int, 10, kFieldBitSize> {}; + class NumberOfHeapPtrEntriesField: public BitField<int, 20, kFieldBitSize> {}; + class WeakObjectStateField: public BitField<WeakObjectState, 30, 2> {}; // Dispatched behavior. 
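The new ConstantPoolArray layout above packs the three 10-bit entry counts at bit positions 0, 10 and 20 and the 2-bit WeakObjectState at position 30 into the single word stored at kArrayLayoutOffset. A standalone sketch of that packing, using a small stand-in for V8's BitField template:

// Standalone sketch of the layout word implied above: three 10-bit entry
// counts at bit positions 0, 10 and 20, plus a 2-bit weak-object state at
// bit 30. Field positions follow the BitField declarations; the helper
// class here is illustrative, not V8's BitField.
#include <cstdint>
#include <cstdio>

template <int kShift, int kSize>
struct Field {
  static constexpr uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static constexpr uint32_t encode(uint32_t value) { return (value << kShift) & kMask; }
  static constexpr uint32_t decode(uint32_t word) { return (word & kMask) >> kShift; }
};

using Int64Entries   = Field<0, 10>;
using CodePtrEntries = Field<10, 10>;
using HeapPtrEntries = Field<20, 10>;
using WeakState      = Field<30, 2>;

int main() {
  uint32_t layout = Int64Entries::encode(5) | CodePtrEntries::encode(7) |
                    HeapPtrEntries::encode(3) | WeakState::encode(1);
  std::printf("int64=%u code_ptr=%u heap_ptr=%u weak_state=%u\n",
              Int64Entries::decode(layout), CodePtrEntries::decode(layout),
              HeapPtrEntries::decode(layout), WeakState::decode(layout));
  return 0;
}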
void ConstantPoolIterateBody(ObjectVisitor* v); @@ -3280,10 +3190,6 @@ class ConstantPoolArray: public FixedArrayBase { DECLARE_VERIFIER(ConstantPoolArray) private: - inline void set_first_code_ptr_index(int value); - inline void set_first_heap_ptr_index(int value); - inline void set_first_int32_index(int value); - inline static int OffsetAt(int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, @@ -3309,23 +3215,6 @@ class ConstantPoolArray: public FixedArrayBase { // [2 + number of descriptors * kDescriptorSize]: start of slack class DescriptorArray: public FixedArray { public: - // WhitenessWitness is used to prove that a descriptor array is white - // (unmarked), so incremental write barriers can be skipped because the - // marking invariant cannot be broken and slots pointing into evacuation - // candidates will be discovered when the object is scanned. A witness is - // always stack-allocated right after creating an array. By allocating a - // witness, incremental marking is globally disabled. The witness is then - // passed along wherever needed to statically prove that the array is known to - // be white. - class WhitenessWitness { - public: - inline explicit WhitenessWitness(FixedArray* array); - inline ~WhitenessWitness(); - - private: - IncrementalMarking* marking_; - }; - // Returns true for both shared empty_descriptor_array and for smis, which the // map uses to encode additional bit fields when the descriptor array is not // yet used. @@ -3396,12 +3285,14 @@ class DescriptorArray: public FixedArray { inline Name* GetKey(int descriptor_number); inline Object** GetKeySlot(int descriptor_number); inline Object* GetValue(int descriptor_number); + inline void SetValue(int descriptor_number, Object* value); inline Object** GetValueSlot(int descriptor_number); inline Object** GetDescriptorStartSlot(int descriptor_number); inline Object** GetDescriptorEndSlot(int descriptor_number); inline PropertyDetails GetDetails(int descriptor_number); inline PropertyType GetType(int descriptor_number); inline int GetFieldIndex(int descriptor_number); + inline HeapType* GetFieldType(int descriptor_number); inline Object* GetConstant(int descriptor_number); inline Object* GetCallbacksObject(int descriptor_number); inline AccessorDescriptor* GetCallbacks(int descriptor_number); @@ -3409,59 +3300,28 @@ class DescriptorArray: public FixedArray { inline Name* GetSortedKey(int descriptor_number); inline int GetSortedKeyIndex(int descriptor_number); inline void SetSortedKey(int pointer, int descriptor_number); - inline void InitializeRepresentations(Representation representation); inline void SetRepresentation(int descriptor_number, Representation representation); // Accessor for complete descriptor. inline void Get(int descriptor_number, Descriptor* desc); - inline void Set(int descriptor_number, - Descriptor* desc, - const WhitenessWitness&); inline void Set(int descriptor_number, Descriptor* desc); + void Replace(int descriptor_number, Descriptor* descriptor); // Append automatically sets the enumeration index. This should only be used // to add descriptors in bulk at the end, followed by sorting the descriptor // array. - inline void Append(Descriptor* desc, const WhitenessWitness&); inline void Append(Descriptor* desc); - // Transfer a complete descriptor from the src descriptor array to this - // descriptor array. 
- void CopyFrom(int dst_index, - DescriptorArray* src, - int src_index, - const WhitenessWitness&); - static Handle<DescriptorArray> Merge(Handle<DescriptorArray> desc, - int verbatim, - int valid, - int new_size, - int modify_index, - StoreMode store_mode, - Handle<DescriptorArray> other); - MUST_USE_RESULT MaybeObject* Merge(int verbatim, - int valid, - int new_size, - int modify_index, - StoreMode store_mode, - DescriptorArray* other); - - bool IsMoreGeneralThan(int verbatim, - int valid, - int new_size, - DescriptorArray* other); - - MUST_USE_RESULT MaybeObject* CopyUpTo(int enumeration_index) { - return CopyUpToAddAttributes(enumeration_index, NONE); - } + static Handle<DescriptorArray> CopyUpTo(Handle<DescriptorArray> desc, + int enumeration_index, + int slack = 0); static Handle<DescriptorArray> CopyUpToAddAttributes( Handle<DescriptorArray> desc, int enumeration_index, - PropertyAttributes attributes); - MUST_USE_RESULT MaybeObject* CopyUpToAddAttributes( - int enumeration_index, - PropertyAttributes attributes); + PropertyAttributes attributes, + int slack = 0); // Sort the instance descriptors by the hash codes of their keys. void Sort(); @@ -3475,9 +3335,9 @@ class DescriptorArray: public FixedArray { // Allocates a DescriptorArray, but returns the singleton // empty descriptor array object if number_of_descriptors is 0. - MUST_USE_RESULT static MaybeObject* Allocate(Isolate* isolate, - int number_of_descriptors, - int slack = 0); + static Handle<DescriptorArray> Allocate(Isolate* isolate, + int number_of_descriptors, + int slack = 0); // Casting. static inline DescriptorArray* cast(Object* obj); @@ -3531,6 +3391,23 @@ class DescriptorArray: public FixedArray { } private: + // WhitenessWitness is used to prove that a descriptor array is white + // (unmarked), so incremental write barriers can be skipped because the + // marking invariant cannot be broken and slots pointing into evacuation + // candidates will be discovered when the object is scanned. A witness is + // always stack-allocated right after creating an array. By allocating a + // witness, incremental marking is globally disabled. The witness is then + // passed along wherever needed to statically prove that the array is known to + // be white. + class WhitenessWitness { + public: + inline explicit WhitenessWitness(DescriptorArray* array); + inline ~WhitenessWitness(); + + private: + IncrementalMarking* marking_; + }; + // An entry in a DescriptorArray, represented as an (array, index) pair. class Entry { public: @@ -3564,6 +3441,18 @@ class DescriptorArray: public FixedArray { kDescriptorValue; } + // Transfer a complete descriptor from the src descriptor array to this + // descriptor array. + void CopyFrom(int index, + DescriptorArray* src, + const WhitenessWitness&); + + inline void Set(int descriptor_number, + Descriptor* desc, + const WhitenessWitness&); + + inline void Append(Descriptor* desc, const WhitenessWitness&); + // Swap first and second descriptor. inline void SwapSortedKeys(int first, int second); @@ -3603,7 +3492,7 @@ inline int Search(T* array, Name* name, int valid_entries = 0); // // Returns the hash value for object. // static uint32_t HashForObject(Key key, Object* object); // // Convert key to an object. -// static inline Object* AsObject(Heap* heap, Key key); +// static inline Handle<Object> AsHandle(Isolate* isolate, Key key); // // The prefix size indicates number of elements in the beginning // // of the backing storage. 
// static const int kPrefixSize = ..; @@ -3630,14 +3519,13 @@ class BaseShape { } }; -template<typename Shape, typename Key> +template<typename Derived, typename Shape, typename Key> class HashTable: public FixedArray { public: // Wrapper methods inline uint32_t Hash(Key key) { if (Shape::UsesSeed) { - return Shape::SeededHash(key, - GetHeap()->HashSeed()); + return Shape::SeededHash(key, GetHeap()->HashSeed()); } else { return Shape::Hash(key); } @@ -3645,8 +3533,7 @@ class HashTable: public FixedArray { inline uint32_t HashForObject(Key key, Object* object) { if (Shape::UsesSeed) { - return Shape::SeededHashForObject(key, - GetHeap()->HashSeed(), object); + return Shape::SeededHashForObject(key, GetHeap()->HashSeed(), object); } else { return Shape::HashForObject(key, object); } @@ -3682,9 +3569,9 @@ class HashTable: public FixedArray { SetNumberOfDeletedElements(NumberOfDeletedElements() + n); } - // Returns a new HashTable object. Might return Failure. - MUST_USE_RESULT static MaybeObject* Allocate( - Heap* heap, + // Returns a new HashTable object. + MUST_USE_RESULT static Handle<Derived> New( + Isolate* isolate, int at_least_space_for, MinimumCapacity capacity_option = USE_DEFAULT_MINIMUM_CAPACITY, PretenureFlag pretenure = NOT_TENURED); @@ -3743,7 +3630,6 @@ class HashTable: public FixedArray { void Rehash(Key key); protected: - friend class ObjectHashSet; friend class ObjectHashTable; // Find the entry at which to insert element with the given key that @@ -3791,6 +3677,17 @@ class HashTable: public FixedArray { return (last + number) & (size - 1); } + // Attempt to shrink hash table after removal of key. + MUST_USE_RESULT static Handle<Derived> Shrink(Handle<Derived> table, Key key); + + // Ensure enough space for n additional elements. + MUST_USE_RESULT static Handle<Derived> EnsureCapacity( + Handle<Derived> table, + int n, + Key key, + PretenureFlag pretenure = NOT_TENURED); + + private: // Returns _expected_ if one of entries given by the first _probe_ probes is // equal to _expected_. Otherwise, returns the entry given by the probe // number _probe_. @@ -3799,16 +3696,7 @@ class HashTable: public FixedArray { void Swap(uint32_t entry1, uint32_t entry2, WriteBarrierMode mode); // Rehashes this hash-table into the new table. - MUST_USE_RESULT MaybeObject* Rehash(HashTable* new_table, Key key); - - // Attempt to shrink hash table after removal of key. - MUST_USE_RESULT MaybeObject* Shrink(Key key); - - // Ensure enough space for n additional elements. - MUST_USE_RESULT MaybeObject* EnsureCapacity( - int n, - Key key, - PretenureFlag pretenure = NOT_TENURED); + void Rehash(Handle<Derived> new_table, Key key); }; @@ -3822,8 +3710,7 @@ class HashTableKey { // Returns the hash value for object. virtual uint32_t HashForObject(Object* key) = 0; // Returns the key object for storing into the hash table. - // If allocations fails a failure object is returned. - MUST_USE_RESULT virtual MaybeObject* AsObject(Heap* heap) = 0; + MUST_USE_RESULT virtual Handle<Object> AsHandle(Isolate* isolate) = 0; // Required. 
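The comment block above describes the Shape policy that parameterizes HashTable: IsMatch, Hash, HashForObject, AsHandle, plus the kPrefixSize and kEntrySize constants. A standalone sketch of a table driven by such a policy, using toy types and simple linear probing (illustrative; not V8's HashTable):

// Standalone sketch of the Shape concept documented above: the table is
// parameterized by a policy class supplying IsMatch/Hash plus the layout
// constants. Toy string storage and linear probing only; not V8's HashTable.
#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

struct StringShape {
  using Key = std::string;
  static bool IsMatch(const Key& key, const std::string& stored) { return key == stored; }
  static uint32_t Hash(const Key& key) {
    uint32_t h = 2166136261u;                       // FNV-1a, stand-in hash
    for (char c : key) { h ^= static_cast<uint8_t>(c); h *= 16777619u; }
    return h;
  }
  static uint32_t HashForObject(const Key&, const std::string& stored) { return Hash(stored); }
  static const int kPrefixSize = 0;                 // no header elements
  static const int kEntrySize = 1;                  // key only, like StringTable
};

template <typename Shape>
class ToyHashTable {
 public:
  static const int kNotFound = -1;
  explicit ToyHashTable(size_t capacity) : entries_(capacity) {}

  int FindEntry(const typename Shape::Key& key) const {
    size_t mask = entries_.size() - 1;              // capacity is a power of two
    for (size_t i = Shape::Hash(key) & mask, n = 0; n < entries_.size();
         i = (i + 1) & mask, ++n) {
      if (entries_[i].empty()) return kNotFound;    // empty slot ends the probe run
      if (Shape::IsMatch(key, entries_[i])) return static_cast<int>(i);
    }
    return kNotFound;
  }

  void Insert(const typename Shape::Key& key) {
    size_t mask = entries_.size() - 1;              // assumes a free slot exists
    size_t i = Shape::Hash(key) & mask;
    while (!entries_[i].empty()) i = (i + 1) & mask;
    entries_[i] = key;
  }

 private:
  std::vector<std::string> entries_;                // stands in for the FixedArray
};

int main() {
  ToyHashTable<StringShape> table(8);
  table.Insert("foo");
  std::printf("foo -> entry %d, bar -> entry %d\n",
              table.FindEntry("foo"), table.FindEntry("bar"));
  return 0;
}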
virtual ~HashTableKey() {} }; @@ -3834,16 +3721,16 @@ class StringTableShape : public BaseShape<HashTableKey*> { static inline bool IsMatch(HashTableKey* key, Object* value) { return key->IsMatch(value); } + static inline uint32_t Hash(HashTableKey* key) { return key->Hash(); } + static inline uint32_t HashForObject(HashTableKey* key, Object* object) { return key->HashForObject(object); } - MUST_USE_RESULT static inline MaybeObject* AsObject(Heap* heap, - HashTableKey* key) { - return key->AsObject(heap); - } + + static inline Handle<Object> AsHandle(Isolate* isolate, HashTableKey* key); static const int kPrefixSize = 0; static const int kEntrySize = 1; @@ -3855,20 +3742,30 @@ class SeqOneByteString; // // No special elements in the prefix and the element size is 1 // because only the string itself (the key) needs to be stored. -class StringTable: public HashTable<StringTableShape, HashTableKey*> { - public: - // Find string in the string table. If it is not there yet, it is - // added. The return value is the string table which might have - // been enlarged. If the return value is not a failure, the string - // pointer *s is set to the string found. - MUST_USE_RESULT MaybeObject* LookupString(String* key, Object** s); - MUST_USE_RESULT MaybeObject* LookupKey(HashTableKey* key, Object** s); +class StringTable: public HashTable<StringTable, + StringTableShape, + HashTableKey*> { + public: + // Find string in the string table. If it is not there yet, it is + // added. The return value is the string found. + static Handle<String> LookupString(Isolate* isolate, Handle<String> key); + static Handle<String> LookupKey(Isolate* isolate, HashTableKey* key); + + // Tries to internalize given string and returns string handle on success + // or an empty handle otherwise. + MUST_USE_RESULT static MaybeHandle<String> InternalizeStringIfExists( + Isolate* isolate, + Handle<String> string); // Looks up a string that is equal to the given string and returns - // true if it is found, assigning the string to the given output - // parameter. - bool LookupStringIfExists(String* str, String** result); - bool LookupTwoCharsStringIfExists(uint16_t c1, uint16_t c2, String** result); + // string handle if it is found, or an empty handle otherwise. + MUST_USE_RESULT static MaybeHandle<String> LookupStringIfExists( + Isolate* isolate, + Handle<String> str); + MUST_USE_RESULT static MaybeHandle<String> LookupTwoCharsStringIfExists( + Isolate* isolate, + uint16_t c1, + uint16_t c2); // Casting. static inline StringTable* cast(Object* obj); @@ -3885,6 +3782,7 @@ class MapCacheShape : public BaseShape<HashTableKey*> { static inline bool IsMatch(HashTableKey* key, Object* value) { return key->IsMatch(value); } + static inline uint32_t Hash(HashTableKey* key) { return key->Hash(); } @@ -3893,10 +3791,7 @@ class MapCacheShape : public BaseShape<HashTableKey*> { return key->HashForObject(object); } - MUST_USE_RESULT static inline MaybeObject* AsObject(Heap* heap, - HashTableKey* key) { - return key->AsObject(heap); - } + static inline Handle<Object> AsHandle(Isolate* isolate, HashTableKey* key); static const int kPrefixSize = 0; static const int kEntrySize = 2; @@ -3907,11 +3802,12 @@ class MapCacheShape : public BaseShape<HashTableKey*> { // // Maps keys that are a fixed array of unique names to a map. // Used for canonicalize maps for object literals. 
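StringTable::LookupString above is a lookup-or-insert: an equal string already in the table is returned, otherwise the new string is added and becomes the canonical copy (MapCache applies the same idea to object-literal maps). A standalone sketch of that canonicalization step, without the Isolate and Handle plumbing of the real code:

// Standalone sketch of internalization as described for StringTable above:
// look the string up, add it if absent, and always hand back the canonical
// copy so equal strings compare by pointer. Illustrative only.
#include <cstdio>
#include <string>
#include <unordered_set>

class InternTable {
 public:
  const std::string* Lookup(const std::string& s) {
    return &*table_.insert(s).first;   // existing entry or freshly added copy
  }
 private:
  std::unordered_set<std::string> table_;
};

int main() {
  InternTable table;
  const std::string* a = table.Lookup("prototype");
  const std::string* b = table.Lookup(std::string("proto") + "type");
  std::printf("same canonical string: %s\n", a == b ? "yes" : "no");
  return 0;
}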
-class MapCache: public HashTable<MapCacheShape, HashTableKey*> { +class MapCache: public HashTable<MapCache, MapCacheShape, HashTableKey*> { public: // Find cached value for a name key, otherwise return null. Object* Lookup(FixedArray* key); - MUST_USE_RESULT MaybeObject* Put(FixedArray* key, Map* value); + static Handle<MapCache> Put( + Handle<MapCache> map_cache, Handle<FixedArray> key, Handle<Map> value); static inline MapCache* cast(Object* obj); private: @@ -3919,43 +3815,53 @@ class MapCache: public HashTable<MapCacheShape, HashTableKey*> { }; -template <typename Shape, typename Key> -class Dictionary: public HashTable<Shape, Key> { +template <typename Derived, typename Shape, typename Key> +class Dictionary: public HashTable<Derived, Shape, Key> { + protected: + typedef HashTable<Derived, Shape, Key> DerivedHashTable; + public: - static inline Dictionary<Shape, Key>* cast(Object* obj) { - return reinterpret_cast<Dictionary<Shape, Key>*>(obj); + static inline Dictionary* cast(Object* obj) { + return reinterpret_cast<Dictionary*>(obj); } // Returns the value at entry. Object* ValueAt(int entry) { - return this->get(HashTable<Shape, Key>::EntryToIndex(entry) + 1); + return this->get(DerivedHashTable::EntryToIndex(entry) + 1); } // Set the value for entry. void ValueAtPut(int entry, Object* value) { - this->set(HashTable<Shape, Key>::EntryToIndex(entry) + 1, value); + this->set(DerivedHashTable::EntryToIndex(entry) + 1, value); } // Returns the property details for the property at entry. PropertyDetails DetailsAt(int entry) { ASSERT(entry >= 0); // Not found is -1, which is not caught by get(). return PropertyDetails( - Smi::cast(this->get(HashTable<Shape, Key>::EntryToIndex(entry) + 2))); + Smi::cast(this->get(DerivedHashTable::EntryToIndex(entry) + 2))); } // Set the details for entry. void DetailsAtPut(int entry, PropertyDetails value) { - this->set(HashTable<Shape, Key>::EntryToIndex(entry) + 2, value.AsSmi()); + this->set(DerivedHashTable::EntryToIndex(entry) + 2, value.AsSmi()); } // Sorting support void CopyValuesTo(FixedArray* elements); // Delete a property from the dictionary. - Object* DeleteProperty(int entry, JSObject::DeleteMode mode); + static Handle<Object> DeleteProperty( + Handle<Derived> dictionary, + int entry, + JSObject::DeleteMode mode); // Attempt to shrink the dictionary after deletion of key. - MUST_USE_RESULT MaybeObject* Shrink(Key key); + MUST_USE_RESULT static inline Handle<Derived> Shrink( + Handle<Derived> dictionary, + Key key) { + return DerivedHashTable::Shrink(dictionary, key); + } // Returns the number of elements in the dictionary filtering out properties // with the specified attributes. @@ -3982,17 +3888,17 @@ class Dictionary: public HashTable<Shape, Key> { } int NextEnumerationIndex() { - return Smi::cast(FixedArray::get(kNextEnumerationIndexIndex))->value(); + return Smi::cast(this->get(kNextEnumerationIndexIndex))->value(); } - // Returns a new array for dictionary usage. Might return Failure. - MUST_USE_RESULT static MaybeObject* Allocate( - Heap* heap, + // Creates a new dictionary. + MUST_USE_RESULT static Handle<Derived> New( + Isolate* isolate, int at_least_space_for, PretenureFlag pretenure = NOT_TENURED); // Ensure enough space for n additional elements. 
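ValueAt and DetailsAt above index the backing store at EntryToIndex(entry) + 1 and + 2, so each dictionary entry is a flat (key, value, details) triple following the table prefix (NameDictionaryShape below uses kPrefixSize 2 and kEntrySize 3). A standalone sketch of that indexing with illustrative types:

// Standalone sketch of the flat (key, value, details) entry layout implied
// by ValueAt/DetailsAt above: entry i lives at prefix + i * kEntrySize, with
// the value at +1 and the details word at +2. Illustrative only.
#include <cstdio>
#include <string>
#include <vector>

class ToyDictionary {
 public:
  static const int kPrefixSize = 2;   // e.g. next enumeration index, max key
  static const int kEntrySize = 3;    // key, value, details

  ToyDictionary() : store_(kPrefixSize) {}

  int AddEntry(const std::string& key, const std::string& value, int details) {
    int entry = static_cast<int>((store_.size() - kPrefixSize) / kEntrySize);
    store_.push_back(key);
    store_.push_back(value);
    store_.push_back(std::to_string(details));
    return entry;
  }
  int EntryToIndex(int entry) const { return kPrefixSize + entry * kEntrySize; }
  std::string KeyAt(int entry) const { return store_[EntryToIndex(entry)]; }
  std::string ValueAt(int entry) const { return store_[EntryToIndex(entry) + 1]; }
  std::string DetailsAt(int entry) const { return store_[EntryToIndex(entry) + 2]; }

 private:
  std::vector<std::string> store_;    // stands in for the backing FixedArray
};

int main() {
  ToyDictionary dict;
  int entry = dict.AddEntry("x", "42", /*details=*/0);
  std::printf("key=%s value=%s details=%s\n", dict.KeyAt(entry).c_str(),
              dict.ValueAt(entry).c_str(), dict.DetailsAt(entry).c_str());
  return 0;
}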
- MUST_USE_RESULT MaybeObject* EnsureCapacity(int n, Key key); + static Handle<Derived> EnsureCapacity(Handle<Derived> obj, int n, Key key); #ifdef OBJECT_PRINT void Print(FILE* out = stdout); @@ -4002,49 +3908,59 @@ class Dictionary: public HashTable<Shape, Key> { // Sets the entry to (key, value) pair. inline void SetEntry(int entry, - Object* key, - Object* value); + Handle<Object> key, + Handle<Object> value); inline void SetEntry(int entry, - Object* key, - Object* value, + Handle<Object> key, + Handle<Object> value, PropertyDetails details); - MUST_USE_RESULT MaybeObject* Add(Key key, - Object* value, - PropertyDetails details); + MUST_USE_RESULT static Handle<Derived> Add( + Handle<Derived> dictionary, + Key key, + Handle<Object> value, + PropertyDetails details); protected: // Generic at put operation. - MUST_USE_RESULT MaybeObject* AtPut(Key key, Object* value); + MUST_USE_RESULT static Handle<Derived> AtPut( + Handle<Derived> dictionary, + Key key, + Handle<Object> value); // Add entry to dictionary. - MUST_USE_RESULT MaybeObject* AddEntry(Key key, - Object* value, - PropertyDetails details, - uint32_t hash); + static void AddEntry( + Handle<Derived> dictionary, + Key key, + Handle<Object> value, + PropertyDetails details, + uint32_t hash); // Generate new enumeration indices to avoid enumeration index overflow. - MUST_USE_RESULT MaybeObject* GenerateNewEnumerationIndices(); - static const int kMaxNumberKeyIndex = - HashTable<Shape, Key>::kPrefixStartIndex; + static void GenerateNewEnumerationIndices(Handle<Derived> dictionary); + static const int kMaxNumberKeyIndex = DerivedHashTable::kPrefixStartIndex; static const int kNextEnumerationIndexIndex = kMaxNumberKeyIndex + 1; }; -class NameDictionaryShape : public BaseShape<Name*> { +class NameDictionaryShape : public BaseShape<Handle<Name> > { public: - static inline bool IsMatch(Name* key, Object* other); - static inline uint32_t Hash(Name* key); - static inline uint32_t HashForObject(Name* key, Object* object); - MUST_USE_RESULT static inline MaybeObject* AsObject(Heap* heap, - Name* key); + static inline bool IsMatch(Handle<Name> key, Object* other); + static inline uint32_t Hash(Handle<Name> key); + static inline uint32_t HashForObject(Handle<Name> key, Object* object); + static inline Handle<Object> AsHandle(Isolate* isolate, Handle<Name> key); static const int kPrefixSize = 2; static const int kEntrySize = 3; static const bool kIsEnumerable = true; }; -class NameDictionary: public Dictionary<NameDictionaryShape, Name*> { +class NameDictionary: public Dictionary<NameDictionary, + NameDictionaryShape, + Handle<Name> > { + typedef Dictionary< + NameDictionary, NameDictionaryShape, Handle<Name> > DerivedDictionary; + public: static inline NameDictionary* cast(Object* obj) { ASSERT(obj->IsDictionary()); @@ -4053,25 +3969,19 @@ class NameDictionary: public Dictionary<NameDictionaryShape, Name*> { // Copies enumerable keys to preallocated fixed array. void CopyEnumKeysTo(FixedArray* storage); - static void DoGenerateNewEnumerationIndices( + inline static void DoGenerateNewEnumerationIndices( Handle<NameDictionary> dictionary); - // For transforming properties of a JSObject. - MUST_USE_RESULT MaybeObject* TransformPropertiesToFastFor( - JSObject* obj, - int unused_property_fields); - // Find entry for key, otherwise return kNotFound. Optimized version of // HashTable::FindEntry. 
- int FindEntry(Name* key); + int FindEntry(Handle<Name> key); }; class NumberDictionaryShape : public BaseShape<uint32_t> { public: static inline bool IsMatch(uint32_t key, Object* other); - MUST_USE_RESULT static inline MaybeObject* AsObject(Heap* heap, - uint32_t key); + static inline Handle<Object> AsHandle(Isolate* isolate, uint32_t key); static const int kEntrySize = 3; static const bool kIsEnumerable = false; }; @@ -4099,7 +4009,9 @@ class UnseededNumberDictionaryShape : public NumberDictionaryShape { class SeededNumberDictionary - : public Dictionary<SeededNumberDictionaryShape, uint32_t> { + : public Dictionary<SeededNumberDictionary, + SeededNumberDictionaryShape, + uint32_t> { public: static SeededNumberDictionary* cast(Object* obj) { ASSERT(obj->IsDictionary()); @@ -4107,28 +4019,24 @@ class SeededNumberDictionary } // Type specific at put (default NONE attributes is used when adding). - MUST_USE_RESULT MaybeObject* AtNumberPut(uint32_t key, Object* value); + MUST_USE_RESULT static Handle<SeededNumberDictionary> AtNumberPut( + Handle<SeededNumberDictionary> dictionary, + uint32_t key, + Handle<Object> value); MUST_USE_RESULT static Handle<SeededNumberDictionary> AddNumberEntry( Handle<SeededNumberDictionary> dictionary, uint32_t key, Handle<Object> value, PropertyDetails details); - MUST_USE_RESULT MaybeObject* AddNumberEntry(uint32_t key, - Object* value, - PropertyDetails details); // Set an existing entry or add a new one if needed. // Return the updated dictionary. MUST_USE_RESULT static Handle<SeededNumberDictionary> Set( Handle<SeededNumberDictionary> dictionary, - uint32_t index, + uint32_t key, Handle<Object> value, PropertyDetails details); - MUST_USE_RESULT MaybeObject* Set(uint32_t key, - Object* value, - PropertyDetails details); - void UpdateMaxNumberKey(uint32_t key); // If slow elements are required we will never go back to fast-case @@ -4152,7 +4060,9 @@ class SeededNumberDictionary class UnseededNumberDictionary - : public Dictionary<UnseededNumberDictionaryShape, uint32_t> { + : public Dictionary<UnseededNumberDictionary, + UnseededNumberDictionaryShape, + uint32_t> { public: static UnseededNumberDictionary* cast(Object* obj) { ASSERT(obj->IsDictionary()); @@ -4160,87 +4070,56 @@ class UnseededNumberDictionary } // Type specific at put (default NONE attributes is used when adding). - MUST_USE_RESULT MaybeObject* AtNumberPut(uint32_t key, Object* value); - MUST_USE_RESULT MaybeObject* AddNumberEntry(uint32_t key, Object* value); + MUST_USE_RESULT static Handle<UnseededNumberDictionary> AtNumberPut( + Handle<UnseededNumberDictionary> dictionary, + uint32_t key, + Handle<Object> value); + MUST_USE_RESULT static Handle<UnseededNumberDictionary> AddNumberEntry( + Handle<UnseededNumberDictionary> dictionary, + uint32_t key, + Handle<Object> value); // Set an existing entry or add a new one if needed. // Return the updated dictionary. 
MUST_USE_RESULT static Handle<UnseededNumberDictionary> Set( Handle<UnseededNumberDictionary> dictionary, - uint32_t index, + uint32_t key, Handle<Object> value); - - MUST_USE_RESULT MaybeObject* Set(uint32_t key, Object* value); }; -template <int entrysize> -class ObjectHashTableShape : public BaseShape<Object*> { +class ObjectHashTableShape : public BaseShape<Handle<Object> > { public: - static inline bool IsMatch(Object* key, Object* other); - static inline uint32_t Hash(Object* key); - static inline uint32_t HashForObject(Object* key, Object* object); - MUST_USE_RESULT static inline MaybeObject* AsObject(Heap* heap, - Object* key); + static inline bool IsMatch(Handle<Object> key, Object* other); + static inline uint32_t Hash(Handle<Object> key); + static inline uint32_t HashForObject(Handle<Object> key, Object* object); + static inline Handle<Object> AsHandle(Isolate* isolate, Handle<Object> key); static const int kPrefixSize = 0; - static const int kEntrySize = entrysize; -}; - - -// ObjectHashSet holds keys that are arbitrary objects by using the identity -// hash of the key for hashing purposes. -class ObjectHashSet: public HashTable<ObjectHashTableShape<1>, Object*> { - public: - static inline ObjectHashSet* cast(Object* obj) { - ASSERT(obj->IsHashTable()); - return reinterpret_cast<ObjectHashSet*>(obj); - } - - // Looks up whether the given key is part of this hash set. - bool Contains(Object* key); - - static Handle<ObjectHashSet> EnsureCapacity( - Handle<ObjectHashSet> table, - int n, - Handle<Object> key, - PretenureFlag pretenure = NOT_TENURED); - - // Attempt to shrink hash table after removal of key. - static Handle<ObjectHashSet> Shrink(Handle<ObjectHashSet> table, - Handle<Object> key); - - // Adds the given key to this hash set. - static Handle<ObjectHashSet> Add(Handle<ObjectHashSet> table, - Handle<Object> key); - - // Removes the given key from this hash set. - static Handle<ObjectHashSet> Remove(Handle<ObjectHashSet> table, - Handle<Object> key); + static const int kEntrySize = 2; }; // ObjectHashTable maps keys that are arbitrary objects to object values by // using the identity hash of the key for hashing purposes. -class ObjectHashTable: public HashTable<ObjectHashTableShape<2>, Object*> { +class ObjectHashTable: public HashTable<ObjectHashTable, + ObjectHashTableShape, + Handle<Object> > { + typedef HashTable< + ObjectHashTable, ObjectHashTableShape, Handle<Object> > DerivedHashTable; public: static inline ObjectHashTable* cast(Object* obj) { ASSERT(obj->IsHashTable()); return reinterpret_cast<ObjectHashTable*>(obj); } - static Handle<ObjectHashTable> EnsureCapacity( - Handle<ObjectHashTable> table, - int n, - Handle<Object> key, - PretenureFlag pretenure = NOT_TENURED); - // Attempt to shrink hash table after removal of key. - static Handle<ObjectHashTable> Shrink(Handle<ObjectHashTable> table, - Handle<Object> key); + MUST_USE_RESULT static inline Handle<ObjectHashTable> Shrink( + Handle<ObjectHashTable> table, + Handle<Object> key); // Looks up the value associated with the given key. The hole value is // returned in case the key is not present. - Object* Lookup(Object* key); + Object* Lookup(Handle<Object> key); // Adds (or overwrites) the value associated with the given key. Mapping a // key to the hole value causes removal of the whole entry. @@ -4261,14 +4140,191 @@ class ObjectHashTable: public HashTable<ObjectHashTableShape<2>, Object*> { }; +// OrderedHashTable is a HashTable with Object keys that preserves +// insertion order. 
There are Map and Set interfaces (OrderedHashMap +// and OrderedHashTable, below). It is meant to be used by JSMap/JSSet. +// +// Only Object* keys are supported, with Object::SameValue() used as the +// equality operator and Object::GetHash() for the hash function. +// +// Based on the "Deterministic Hash Table" as described by Jason Orendorff at +// https://wiki.mozilla.org/User:Jorend/Deterministic_hash_tables +// Originally attributed to Tyler Close. +// +// Memory layout: +// [0]: bucket count +// [1]: element count +// [2]: deleted element count +// [3]: live iterators (doubly-linked list) +// [4..(NumberOfBuckets() - 1)]: "hash table", where each item is an offset +// into the data table (see below) where the +// first item in this bucket is stored. +// [4 + NumberOfBuckets()..length]: "data table", an array of length +// Capacity() * kEntrySize, where the first entrysize +// items are handled by the derived class and the +// item at kChainOffset is another entry into the +// data table indicating the next entry in this hash +// bucket. +template<class Derived, class Iterator, int entrysize> +class OrderedHashTable: public FixedArray { + public: + // Returns an OrderedHashTable with a capacity of at least |capacity|. + static Handle<Derived> Allocate( + Isolate* isolate, int capacity, PretenureFlag pretenure = NOT_TENURED); + + // Returns an OrderedHashTable (possibly |table|) with enough space + // to add at least one new element. + static Handle<Derived> EnsureGrowable(Handle<Derived> table); + + // Returns an OrderedHashTable (possibly |table|) that's shrunken + // if possible. + static Handle<Derived> Shrink(Handle<Derived> table); + + // Returns a new empty OrderedHashTable and updates all the iterators to + // point to the new table. + static Handle<Derived> Clear(Handle<Derived> table); + + // Returns kNotFound if the key isn't present. + int FindEntry(Handle<Object> key); + + int NumberOfElements() { + return Smi::cast(get(kNumberOfElementsIndex))->value(); + } + + int NumberOfDeletedElements() { + return Smi::cast(get(kNumberOfDeletedElementsIndex))->value(); + } + + int UsedCapacity() { return NumberOfElements() + NumberOfDeletedElements(); } + + int NumberOfBuckets() { + return Smi::cast(get(kNumberOfBucketsIndex))->value(); + } + + Object* iterators() { return get(kIteratorsIndex); } + + void set_iterators(Object* value) { set(kIteratorsIndex, value); } + + // Returns the index into the data table where the new entry + // should be placed. The table is assumed to have enough space + // for a new entry. + int AddEntry(int hash); + + // Removes the entry, and puts the_hole in entrysize pointers + // (leaving the hash table chain intact). + void RemoveEntry(int entry); + + // Returns an index into |this| for the given entry. + int EntryToIndex(int entry) { + return kHashTableStartIndex + NumberOfBuckets() + (entry * kEntrySize); + } + + Object* KeyAt(int entry) { return get(EntryToIndex(entry)); } + + static const int kNotFound = -1; + static const int kMinCapacity = 4; + + private: + static Handle<Derived> Rehash(Handle<Derived> table, int new_capacity); + + void SetNumberOfBuckets(int num) { + set(kNumberOfBucketsIndex, Smi::FromInt(num)); + } + + void SetNumberOfElements(int num) { + set(kNumberOfElementsIndex, Smi::FromInt(num)); + } + + void SetNumberOfDeletedElements(int num) { + set(kNumberOfDeletedElementsIndex, Smi::FromInt(num)); + } + + int Capacity() { + return NumberOfBuckets() * kLoadFactor; + } + + // Returns the next entry for the given entry. 
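The memory-layout comment above follows the cited deterministic hash table design: four header slots, then one head index per bucket, then a data table in insertion order whose extra slot per entry chains to the next entry in the same bucket. A standalone sketch of that bucket and chain arithmetic for entrysize 1 (illustrative; not V8's OrderedHashTable):

// Standalone sketch of the layout described above, for entrysize == 1:
// header fields, then NumberOfBuckets() bucket heads, then (key, chain)
// entries in insertion order. FindEntry follows the per-bucket chain.
// Illustrative only.
#include <cstdio>
#include <functional>
#include <string>
#include <vector>

class ToyOrderedHashSet {
 public:
  static const int kNotFound = -1;

  explicit ToyOrderedHashSet(int buckets)
      : buckets_(buckets), heads_(buckets, -1) {}   // all buckets start empty

  int FindEntry(const std::string& key) const {
    for (int entry = HashToEntry(Hash(key)); entry != kNotFound;
         entry = entries_[entry].chain) {
      if (entries_[entry].key == key) return entry;
    }
    return kNotFound;
  }

  int AddEntry(const std::string& key) {
    int bucket = Hash(key) & (buckets_ - 1);        // bucket count is a power of two
    int entry = static_cast<int>(entries_.size());
    entries_.push_back({key, heads_[bucket]});      // chain to the previous head
    heads_[bucket] = entry;
    return entry;
  }

 private:
  struct Entry { std::string key; int chain; };

  static int Hash(const std::string& key) {
    return static_cast<int>(std::hash<std::string>()(key) & 0x3fffffff);
  }
  int HashToEntry(int hash) const { return heads_[hash & (buckets_ - 1)]; }

  int buckets_;
  std::vector<int> heads_;         // the "hash table" part of the layout
  std::vector<Entry> entries_;     // the "data table", in insertion order
};

int main() {
  ToyOrderedHashSet set(4);
  set.AddEntry("a");
  set.AddEntry("b");
  std::printf("a -> %d, b -> %d, c -> %d\n",
              set.FindEntry("a"), set.FindEntry("b"), set.FindEntry("c"));
  return 0;
}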
+ int ChainAt(int entry) { + return Smi::cast(get(EntryToIndex(entry) + kChainOffset))->value(); + } + + int HashToBucket(int hash) { + return hash & (NumberOfBuckets() - 1); + } + + int HashToEntry(int hash) { + int bucket = HashToBucket(hash); + return Smi::cast(get(kHashTableStartIndex + bucket))->value(); + } + + static const int kNumberOfBucketsIndex = 0; + static const int kNumberOfElementsIndex = kNumberOfBucketsIndex + 1; + static const int kNumberOfDeletedElementsIndex = kNumberOfElementsIndex + 1; + static const int kIteratorsIndex = kNumberOfDeletedElementsIndex + 1; + static const int kHashTableStartIndex = kIteratorsIndex + 1; + + static const int kEntrySize = entrysize + 1; + static const int kChainOffset = entrysize; + + static const int kLoadFactor = 2; + static const int kMaxCapacity = + (FixedArray::kMaxLength - kHashTableStartIndex) + / (1 + (kEntrySize * kLoadFactor)); +}; + + +class JSSetIterator; + + +class OrderedHashSet: public OrderedHashTable< + OrderedHashSet, JSSetIterator, 1> { + public: + static OrderedHashSet* cast(Object* obj) { + ASSERT(obj->IsOrderedHashTable()); + return reinterpret_cast<OrderedHashSet*>(obj); + } + + bool Contains(Handle<Object> key); + static Handle<OrderedHashSet> Add( + Handle<OrderedHashSet> table, Handle<Object> key); + static Handle<OrderedHashSet> Remove( + Handle<OrderedHashSet> table, Handle<Object> key); +}; + + +class JSMapIterator; + + +class OrderedHashMap:public OrderedHashTable< + OrderedHashMap, JSMapIterator, 2> { + public: + static OrderedHashMap* cast(Object* obj) { + ASSERT(obj->IsOrderedHashTable()); + return reinterpret_cast<OrderedHashMap*>(obj); + } + + Object* Lookup(Handle<Object> key); + static Handle<OrderedHashMap> Put( + Handle<OrderedHashMap> table, + Handle<Object> key, + Handle<Object> value); + + private: + Object* ValueAt(int entry) { + return get(EntryToIndex(entry) + kValueOffset); + } + + static const int kValueOffset = 1; +}; + + template <int entrysize> -class WeakHashTableShape : public BaseShape<Object*> { +class WeakHashTableShape : public BaseShape<Handle<Object> > { public: - static inline bool IsMatch(Object* key, Object* other); - static inline uint32_t Hash(Object* key); - static inline uint32_t HashForObject(Object* key, Object* object); - MUST_USE_RESULT static inline MaybeObject* AsObject(Heap* heap, - Object* key); + static inline bool IsMatch(Handle<Object> key, Object* other); + static inline uint32_t Hash(Handle<Object> key); + static inline uint32_t HashForObject(Handle<Object> key, Object* object); + static inline Handle<Object> AsHandle(Isolate* isolate, Handle<Object> key); static const int kPrefixSize = 0; static const int kEntrySize = entrysize; }; @@ -4277,7 +4333,11 @@ class WeakHashTableShape : public BaseShape<Object*> { // WeakHashTable maps keys that are arbitrary objects to object values. // It is used for the global weak hash table that maps objects // embedded in optimized code to dependent code lists. -class WeakHashTable: public HashTable<WeakHashTableShape<2>, Object*> { +class WeakHashTable: public HashTable<WeakHashTable, + WeakHashTableShape<2>, + Handle<Object> > { + typedef HashTable< + WeakHashTable, WeakHashTableShape<2>, Handle<Object> > DerivedHashTable; public: static inline WeakHashTable* cast(Object* obj) { ASSERT(obj->IsHashTable()); @@ -4286,11 +4346,13 @@ class WeakHashTable: public HashTable<WeakHashTableShape<2>, Object*> { // Looks up the value associated with the given key. The hole value is // returned in case the key is not present. 
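The bucket-and-chain layout described in the OrderedHashTable comment above can be modeled outside of V8 as one flat array: a bucket array of head indices plus a data table whose extra slot per entry links to the next entry in the same bucket. The following is a minimal standalone sketch of that lookup and insertion scheme only; FlatOrderedSetModel and all of its members are invented names, integer keys stand in for objects, and each key is used as its own hash. It is not the V8 implementation.

#include <cassert>
#include <vector>

struct FlatOrderedSetModel {
  static const int kEntrySize = 2;           // key + chain link
  int num_buckets;
  std::vector<int> buckets;                  // head entry per bucket, -1 if empty
  std::vector<int> data;                     // entries kept in insertion order
  int num_elements = 0;

  explicit FlatOrderedSetModel(int nb) : num_buckets(nb), buckets(nb, -1) {}

  int HashToBucket(int hash) const { return hash & (num_buckets - 1); }

  int FindEntry(int key) const {
    int entry = buckets[HashToBucket(key)];
    while (entry != -1) {
      if (data[entry * kEntrySize] == key) return entry;
      entry = data[entry * kEntrySize + 1];  // follow the chain link
    }
    return -1;                               // kNotFound analogue
  }

  void Add(int key) {
    if (FindEntry(key) != -1) return;
    int bucket = HashToBucket(key);
    int entry = num_elements++;
    data.push_back(key);
    data.push_back(buckets[bucket]);         // chain to the previous bucket head
    buckets[bucket] = entry;                 // new entry becomes the head
  }
};

int main() {
  FlatOrderedSetModel set(4);                // bucket count must be a power of two
  set.Add(7);
  set.Add(11);
  assert(set.FindEntry(7) != -1);
  assert(set.FindEntry(3) == -1);
  return 0;
}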
- Object* Lookup(Object* key); + Object* Lookup(Handle<Object> key); // Adds (or overwrites) the value associated with the given key. Mapping a // key to the hole value causes removal of the whole entry. - MUST_USE_RESULT MaybeObject* Put(Object* key, Object* value); + MUST_USE_RESULT static Handle<WeakHashTable> Put(Handle<WeakHashTable> table, + Handle<Object> key, + Handle<Object> value); // This function is called when heap verification is turned on. void Zap(Object* value) { @@ -4304,7 +4366,7 @@ class WeakHashTable: public HashTable<WeakHashTableShape<2>, Object*> { private: friend class MarkCompactCollector; - void AddEntry(int entry, Object* key, Object* value); + void AddEntry(int entry, Handle<Object> key, Handle<Object> value); // Returns the index to the value of an entry. static inline int EntryToValueIndex(int entry) { @@ -4421,6 +4483,10 @@ class ScopeInfo : public FixedArray { // Return the initialization flag of the given context local. InitializationFlag ContextLocalInitFlag(int var); + // Return true if this local was introduced by the compiler, and should not be + // exposed to the user in a debugger. + bool LocalIsSynthetic(int var); + // Lookup support for serialized scope info. Returns the // the stack slot index for a given slot name if the slot is // present; otherwise returns a value < 0. The name must be an internalized @@ -4432,9 +4498,10 @@ class ScopeInfo : public FixedArray { // returns a value < 0. The name must be an internalized string. // If the slot is present and mode != NULL, sets *mode to the corresponding // mode for that variable. - int ContextSlotIndex(String* name, - VariableMode* mode, - InitializationFlag* init_flag); + static int ContextSlotIndex(Handle<ScopeInfo> scope_info, + Handle<String> name, + VariableMode* mode, + InitializationFlag* init_flag); // Lookup support for serialized scope info. Returns the // parameter index for a given parameter name if the parameter is present; @@ -4560,18 +4627,27 @@ class ScopeInfo : public FixedArray { // needs very limited number of distinct normalized maps. class NormalizedMapCache: public FixedArray { public: - static const int kEntries = 64; + static Handle<NormalizedMapCache> New(Isolate* isolate); - static Handle<Map> Get(Handle<NormalizedMapCache> cache, - Handle<JSObject> object, - PropertyNormalizationMode mode); + MUST_USE_RESULT MaybeHandle<Map> Get(Handle<Map> fast_map, + PropertyNormalizationMode mode); + void Set(Handle<Map> fast_map, Handle<Map> normalized_map); void Clear(); // Casting static inline NormalizedMapCache* cast(Object* obj); + static inline bool IsNormalizedMapCache(Object* obj); DECLARE_VERIFIER(NormalizedMapCache) + private: + static const int kEntries = 64; + + static inline int GetIndex(Handle<Map> map); + + // The following declarations hide base class methods. + Object* get(int index); + void set(int index, Object* value); }; @@ -4638,6 +4714,9 @@ class FreeSpace: public HeapObject { inline int size(); inline void set_size(int value); + inline int nobarrier_size(); + inline void nobarrier_set_size(int value); + inline int Size() { return size(); } // Casting. @@ -4723,13 +4802,12 @@ class ExternalUint8ClampedArray: public ExternalArray { // Setter and getter. 
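NormalizedMapCache above exposes a fixed kEntries-sized store with a private GetIndex(map), which suggests a simple direct-mapped policy: each key hashes to exactly one slot, and a collision overwrites whatever was there. Under that assumption, here is a small standalone sketch; DirectMappedCache and its string keys are invented for illustration and do not mirror V8's map handling.

#include <cassert>
#include <functional>
#include <string>
#include <vector>

class DirectMappedCache {
 public:
  static const int kEntries = 64;
  DirectMappedCache() : keys_(kEntries), values_(kEntries) {}

  bool Get(const std::string& key, std::string* value) const {
    int index = GetIndex(key);
    if (keys_[index] != key) return false;   // miss
    *value = values_[index];
    return true;
  }

  void Set(const std::string& key, const std::string& value) {
    int index = GetIndex(key);
    keys_[index] = key;                      // collisions simply overwrite
    values_[index] = value;
  }

 private:
  static int GetIndex(const std::string& key) {
    return static_cast<int>(std::hash<std::string>()(key) % kEntries);
  }
  std::vector<std::string> keys_;
  std::vector<std::string> values_;
};

int main() {
  DirectMappedCache cache;
  cache.Set("fast-map", "normalized-map");
  std::string out;
  assert(cache.Get("fast-map", &out) && out == "normalized-map");
  return 0;
}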
inline uint8_t get_scalar(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); + static inline Handle<Object> get(Handle<ExternalUint8ClampedArray> array, + int index); inline void set(int index, uint8_t value); - // This accessor applies the correct conversion from Smi, HeapNumber and - // undefined and clamps the converted value between 0 and 255. - Object* SetValue(uint32_t index, Object* value); - + // This accessor applies the correct conversion from Smi, HeapNumber + // and undefined and clamps the converted value between 0 and 255. static Handle<Object> SetValue(Handle<ExternalUint8ClampedArray> array, uint32_t index, Handle<Object> value); @@ -4750,17 +4828,15 @@ class ExternalInt8Array: public ExternalArray { public: // Setter and getter. inline int8_t get_scalar(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); + static inline Handle<Object> get(Handle<ExternalInt8Array> array, int index); inline void set(int index, int8_t value); + // This accessor applies the correct conversion from Smi, HeapNumber + // and undefined. static Handle<Object> SetValue(Handle<ExternalInt8Array> array, uint32_t index, Handle<Object> value); - // This accessor applies the correct conversion from Smi, HeapNumber - // and undefined. - MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value); - // Casting. static inline ExternalInt8Array* cast(Object* obj); @@ -4777,17 +4853,15 @@ class ExternalUint8Array: public ExternalArray { public: // Setter and getter. inline uint8_t get_scalar(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); + static inline Handle<Object> get(Handle<ExternalUint8Array> array, int index); inline void set(int index, uint8_t value); + // This accessor applies the correct conversion from Smi, HeapNumber + // and undefined. static Handle<Object> SetValue(Handle<ExternalUint8Array> array, uint32_t index, Handle<Object> value); - // This accessor applies the correct conversion from Smi, HeapNumber - // and undefined. - MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value); - // Casting. static inline ExternalUint8Array* cast(Object* obj); @@ -4804,17 +4878,15 @@ class ExternalInt16Array: public ExternalArray { public: // Setter and getter. inline int16_t get_scalar(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); + static inline Handle<Object> get(Handle<ExternalInt16Array> array, int index); inline void set(int index, int16_t value); + // This accessor applies the correct conversion from Smi, HeapNumber + // and undefined. static Handle<Object> SetValue(Handle<ExternalInt16Array> array, uint32_t index, Handle<Object> value); - // This accessor applies the correct conversion from Smi, HeapNumber - // and undefined. - MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value); - // Casting. static inline ExternalInt16Array* cast(Object* obj); @@ -4831,17 +4903,16 @@ class ExternalUint16Array: public ExternalArray { public: // Setter and getter. inline uint16_t get_scalar(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); + static inline Handle<Object> get(Handle<ExternalUint16Array> array, + int index); inline void set(int index, uint16_t value); + // This accessor applies the correct conversion from Smi, HeapNumber + // and undefined. static Handle<Object> SetValue(Handle<ExternalUint16Array> array, uint32_t index, Handle<Object> value); - // This accessor applies the correct conversion from Smi, HeapNumber - // and undefined. 
- MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value); - // Casting. static inline ExternalUint16Array* cast(Object* obj); @@ -4858,17 +4929,15 @@ class ExternalInt32Array: public ExternalArray { public: // Setter and getter. inline int32_t get_scalar(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); + static inline Handle<Object> get(Handle<ExternalInt32Array> array, int index); inline void set(int index, int32_t value); + // This accessor applies the correct conversion from Smi, HeapNumber + // and undefined. static Handle<Object> SetValue(Handle<ExternalInt32Array> array, uint32_t index, Handle<Object> value); - // This accessor applies the correct conversion from Smi, HeapNumber - // and undefined. - MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value); - // Casting. static inline ExternalInt32Array* cast(Object* obj); @@ -4885,17 +4954,16 @@ class ExternalUint32Array: public ExternalArray { public: // Setter and getter. inline uint32_t get_scalar(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); + static inline Handle<Object> get(Handle<ExternalUint32Array> array, + int index); inline void set(int index, uint32_t value); + // This accessor applies the correct conversion from Smi, HeapNumber + // and undefined. static Handle<Object> SetValue(Handle<ExternalUint32Array> array, uint32_t index, Handle<Object> value); - // This accessor applies the correct conversion from Smi, HeapNumber - // and undefined. - MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value); - // Casting. static inline ExternalUint32Array* cast(Object* obj); @@ -4912,17 +4980,16 @@ class ExternalFloat32Array: public ExternalArray { public: // Setter and getter. inline float get_scalar(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); + static inline Handle<Object> get(Handle<ExternalFloat32Array> array, + int index); inline void set(int index, float value); + // This accessor applies the correct conversion from Smi, HeapNumber + // and undefined. static Handle<Object> SetValue(Handle<ExternalFloat32Array> array, uint32_t index, Handle<Object> value); - // This accessor applies the correct conversion from Smi, HeapNumber - // and undefined. - MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value); - // Casting. static inline ExternalFloat32Array* cast(Object* obj); @@ -4939,17 +5006,16 @@ class ExternalFloat64Array: public ExternalArray { public: // Setter and getter. inline double get_scalar(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); + static inline Handle<Object> get(Handle<ExternalFloat64Array> array, + int index); inline void set(int index, double value); + // This accessor applies the correct conversion from Smi, HeapNumber + // and undefined. static Handle<Object> SetValue(Handle<ExternalFloat64Array> array, uint32_t index, Handle<Object> value); - // This accessor applies the correct conversion from Smi, HeapNumber - // and undefined. - MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value); - // Casting. 
static inline ExternalFloat64Array* cast(Object* obj); @@ -4999,7 +5065,7 @@ class FixedTypedArray: public FixedTypedArrayBase { } inline ElementType get_scalar(int index); - MUST_USE_RESULT inline MaybeObject* get(int index); + static inline Handle<Object> get(Handle<FixedTypedArray> array, int index); inline void set(int index, ElementType value); static inline ElementType from_int(int value); @@ -5007,8 +5073,6 @@ class FixedTypedArray: public FixedTypedArrayBase { // This accessor applies the correct conversion from Smi, HeapNumber // and undefined. - MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value); - static Handle<Object> SetValue(Handle<FixedTypedArray<Traits> > array, uint32_t index, Handle<Object> value); @@ -5026,7 +5090,8 @@ class FixedTypedArray: public FixedTypedArrayBase { typedef elementType ElementType; \ static const InstanceType kInstanceType = FIXED_##TYPE##_ARRAY_TYPE; \ static const char* Designator() { return #type " array"; } \ - static inline MaybeObject* ToObject(Heap* heap, elementType scalar); \ + static inline Handle<Object> ToHandle(Isolate* isolate, \ + elementType scalar); \ static inline elementType defaultValue(); \ }; \ \ @@ -5110,9 +5175,9 @@ class DeoptimizationInputData: public FixedArray { } // Allocates a DeoptimizationInputData. - MUST_USE_RESULT static MaybeObject* Allocate(Isolate* isolate, - int deopt_entry_count, - PretenureFlag pretenure); + static Handle<DeoptimizationInputData> New(Isolate* isolate, + int deopt_entry_count, + PretenureFlag pretenure); // Casting. static inline DeoptimizationInputData* cast(Object* obj); @@ -5157,9 +5222,9 @@ class DeoptimizationOutputData: public FixedArray { } // Allocates a DeoptimizationOutputData. - MUST_USE_RESULT static MaybeObject* Allocate(Isolate* isolate, - int number_of_deopt_points, - PretenureFlag pretenure); + static Handle<DeoptimizationOutputData> New(Isolate* isolate, + int number_of_deopt_points, + PretenureFlag pretenure); // Casting. static inline DeoptimizationOutputData* cast(Object* obj); @@ -5194,6 +5259,7 @@ class Code: public HeapObject { #define IC_KIND_LIST(V) \ V(LOAD_IC) \ V(KEYED_LOAD_IC) \ + V(CALL_IC) \ V(STORE_IC) \ V(KEYED_STORE_IC) \ V(BINARY_OP_IC) \ @@ -5303,12 +5369,24 @@ class Code: public HeapObject { inline bool is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; } inline bool is_store_stub() { return kind() == STORE_IC; } inline bool is_keyed_store_stub() { return kind() == KEYED_STORE_IC; } + inline bool is_call_stub() { return kind() == CALL_IC; } inline bool is_binary_op_stub() { return kind() == BINARY_OP_IC; } inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; } inline bool is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; } inline bool is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; } inline bool is_keyed_stub(); inline bool is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; } + inline bool is_weak_stub(); + inline void mark_as_weak_stub(); + inline bool is_invalidated_weak_stub(); + inline void mark_as_invalidated_weak_stub(); + + inline bool CanBeWeakStub() { + Kind k = kind(); + return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC || + k == KEYED_STORE_IC || k == COMPARE_NIL_IC) && + ic_state() == MONOMORPHIC; + } inline void set_raw_kind_specific_flags1(int value); inline void set_raw_kind_specific_flags2(int value); @@ -5402,7 +5480,6 @@ class Code: public HeapObject { // Find the first map in an IC stub. 
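A pattern worth calling out in the typed-array and deoptimization-data hunks above: accessors that used to return a raw MaybeObject* become static functions that take and return Handle<>s, so any allocation they trigger can move objects without leaving the caller holding a stale pointer. The toy below sketches only that calling convention; ToyHandle and ToyArray are invented stand-ins, not V8's handle machinery.

#include <cassert>
#include <memory>
#include <vector>

// Invented stand-in for a handle: one level of indirection that the "collector"
// (here, the grow operation) can repoint while the caller keeps its handle.
template <typename T>
struct ToyHandle {
  std::shared_ptr<std::shared_ptr<T>> cell;
  T* operator->() const { return cell->get(); }
};

struct ToyArray {
  std::vector<double> values;

  // New-style accessor shape (sketch): static, handles in, handle-safe effects.
  static void Append(ToyHandle<ToyArray> array, double value) {
    // "Allocation" may replace the object; the shared cell is updated so the
    // caller's handle still refers to the live copy afterwards.
    auto grown = std::make_shared<ToyArray>(*array.cell->get());
    grown->values.push_back(value);
    *array.cell = grown;
  }
};

int main() {
  ToyHandle<ToyArray> h{
      std::make_shared<std::shared_ptr<ToyArray>>(std::make_shared<ToyArray>())};
  ToyArray::Append(h, 1.5);
  assert(h->values.size() == 1 && h->values[0] == 1.5);
  return 0;
}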
Map* FindFirstMap(); void FindAllMaps(MapHandleList* maps); - void FindAllTypes(TypeHandleList* types); // Find the first handler in an IC stub. Code* FindFirstHandler(); @@ -5523,8 +5600,6 @@ class Code: public HeapObject { void ClearInlineCaches(); void ClearInlineCaches(Kind kind); - void ClearTypeFeedbackInfo(Heap* heap); - BailoutId TranslatePcOffsetToAstId(uint32_t pc_offset); uint32_t TranslateAstIdToPcOffset(BailoutId ast_id); @@ -5550,7 +5625,7 @@ class Code: public HeapObject { static void MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate); static void MarkCodeAsExecuted(byte* sequence, Isolate* isolate); void MakeOlder(MarkingParity); - static bool IsYoungSequence(byte* sequence); + static bool IsYoungSequence(Isolate* isolate, byte* sequence); bool IsOld(); Age GetAge(); // Gets the raw code age, including psuedo code-age values such as @@ -5567,11 +5642,17 @@ class Code: public HeapObject { void VerifyEmbeddedObjectsDependency(); #endif + inline bool CanContainWeakObjects() { + return is_optimized_code() || is_weak_stub(); + } + inline bool IsWeakObject(Object* object) { - return is_optimized_code() && IsWeakObjectInOptimizedCode(object); + return (is_optimized_code() && IsWeakObjectInOptimizedCode(object)) || + (is_weak_stub() && IsWeakObjectInIC(object)); } - inline bool IsWeakObjectInOptimizedCode(Object* object); + static inline bool IsWeakObjectInOptimizedCode(Object* object); + static inline bool IsWeakObjectInIC(Object* object); // Max loop nesting marker used to postpose OSR. We don't take loop // nesting that is deeper than 5 levels into account. @@ -5634,11 +5715,17 @@ class Code: public HeapObject { static const int kMarkedForDeoptimizationFirstBit = kStackSlotsFirstBit + kStackSlotsBitCount + 1; static const int kMarkedForDeoptimizationBitCount = 1; + static const int kWeakStubFirstBit = + kMarkedForDeoptimizationFirstBit + kMarkedForDeoptimizationBitCount; + static const int kWeakStubBitCount = 1; + static const int kInvalidatedWeakStubFirstBit = + kWeakStubFirstBit + kWeakStubBitCount; + static const int kInvalidatedWeakStubBitCount = 1; STATIC_ASSERT(kStackSlotsFirstBit + kStackSlotsBitCount <= 32); STATIC_ASSERT(kHasFunctionCacheFirstBit + kHasFunctionCacheBitCount <= 32); - STATIC_ASSERT(kMarkedForDeoptimizationFirstBit + - kMarkedForDeoptimizationBitCount <= 32); + STATIC_ASSERT(kInvalidatedWeakStubFirstBit + + kInvalidatedWeakStubBitCount <= 32); class StackSlotsField: public BitField<int, kStackSlotsFirstBit, kStackSlotsBitCount> {}; // NOLINT @@ -5647,6 +5734,12 @@ class Code: public HeapObject { class MarkedForDeoptimizationField: public BitField<bool, kMarkedForDeoptimizationFirstBit, kMarkedForDeoptimizationBitCount> {}; // NOLINT + class WeakStubField: public BitField<bool, + kWeakStubFirstBit, + kWeakStubBitCount> {}; // NOLINT + class InvalidatedWeakStubField: public BitField<bool, + kInvalidatedWeakStubFirstBit, + kInvalidatedWeakStubBitCount> {}; // NOLINT // KindSpecificFlags2 layout (ALL) static const int kIsCrankshaftedBit = 0; @@ -5694,7 +5787,7 @@ class Code: public HeapObject { byte* FindCodeAgeSequence(); static void GetCodeAgeAndParity(Code* code, Age* age, MarkingParity* parity); - static void GetCodeAgeAndParity(byte* sequence, Age* age, + static void GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age, MarkingParity* parity); static Code* GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity); @@ -5731,9 +5824,14 @@ class CompilationInfo; class DependentCode: public FixedArray { public: enum DependencyGroup { 
+ // Group of IC stubs that weakly embed this map and depend on being + // invalidated when the map is garbage collected. Dependent IC stubs form + // a linked list. This group stores only the head of the list. This means + // that the number_of_entries(kWeakICGroup) is 0 or 1. + kWeakICGroup, // Group of code that weakly embed this map and depend on being // deoptimized when the map is garbage collected. - kWeaklyEmbeddedGroup, + kWeakCodeGroup, // Group of code that embed a transition to this map, and depend on being // deoptimized when the transition is replaced by a new version. kTransitionGroup, @@ -5748,6 +5846,9 @@ class DependentCode: public FixedArray { // Group of code that depends on global property values in property cells // not being changed. kPropertyCellChangedGroup, + // Group of code that omit run-time type checks for the field(s) introduced + // by this map. + kFieldTypeGroup, // Group of code that depends on tenuring information in AllocationSites // not being changed. kAllocationSiteTenuringChangedGroup, @@ -5784,6 +5885,7 @@ class DependentCode: public FixedArray { bool MarkCodeForDeoptimization(Isolate* isolate, DependentCode::DependencyGroup group); + void AddToDependentICList(Handle<Code> stub); // The following low-level accessors should only be used by this class // and the mark compact collector. @@ -6000,29 +6102,16 @@ class Map: public HeapObject { inline bool HasTransitionArray(); inline bool HasElementsTransition(); inline Map* elements_transition_map(); - MUST_USE_RESULT inline MaybeObject* set_elements_transition_map( - Map* transitioned_map); - inline void SetTransition(int transition_index, Map* target); + static Handle<TransitionArray> SetElementsTransitionMap( + Handle<Map> map, Handle<Map> transitioned_map); inline Map* GetTransition(int transition_index); + inline int SearchTransition(Name* name); + inline FixedArrayBase* GetInitialElements(); - static Handle<TransitionArray> AddTransition(Handle<Map> map, - Handle<Name> key, - Handle<Map> target, - SimpleTransitionFlag flag); - - MUST_USE_RESULT inline MaybeObject* AddTransition(Name* key, - Map* target, - SimpleTransitionFlag flag); DECL_ACCESSORS(transitions, TransitionArray) - inline void ClearTransitions(Heap* heap, - WriteBarrierMode mode = UPDATE_WRITE_BARRIER); - - void DeprecateTransitionTree(); - void DeprecateTarget(Name* key, DescriptorArray* new_descriptors); Map* FindRootMap(); - Map* FindUpdatedMap(int verbatim, int length, DescriptorArray* descriptors); - Map* FindLastMatchMap(int verbatim, int length, DescriptorArray* descriptors); + Map* FindFieldOwner(int descriptor); inline int GetInObjectPropertyOffset(int index); @@ -6032,13 +6121,19 @@ class Map: public HeapObject { int target_number_of_fields, int target_inobject, int target_unused); - static Handle<Map> GeneralizeAllFieldRepresentations( - Handle<Map> map, - Representation new_representation); + static Handle<Map> GeneralizeAllFieldRepresentations(Handle<Map> map); + static Handle<HeapType> GeneralizeFieldType(Handle<HeapType> type1, + Handle<HeapType> type2, + Isolate* isolate) + V8_WARN_UNUSED_RESULT; + static void GeneralizeFieldType(Handle<Map> map, + int modify_index, + Handle<HeapType> new_field_type); static Handle<Map> GeneralizeRepresentation( Handle<Map> map, int modify_index, Representation new_representation, + Handle<HeapType> new_field_type, StoreMode store_mode); static Handle<Map> CopyGeneralizeAllRepresentations( Handle<Map> map, @@ -6047,14 +6142,7 @@ class Map: public HeapObject { PropertyAttributes 
attributes, const char* reason); - void PrintGeneralization(FILE* file, - const char* reason, - int modify_index, - int split, - int descriptors, - bool constant_to_field, - Representation old_representation, - Representation new_representation); + static Handle<Map> Normalize(Handle<Map> map, PropertyNormalizationMode mode); // Returns the constructor name (the name (possibly, inferred name) of the // function that was used to instantiate the object). @@ -6100,7 +6188,7 @@ class Map: public HeapObject { // [stub cache]: contains stubs compiled for this map. DECL_ACCESSORS(code_cache, Object) - // [dependent code]: list of optimized codes that have this map embedded. + // [dependent code]: list of optimized codes that weakly embed this map. DECL_ACCESSORS(dependent_code, DependentCode) // [back pointer]: points back to the parent map from which a transition @@ -6121,13 +6209,8 @@ class Map: public HeapObject { // 2 + 2 * i: prototype // 3 + 2 * i: target map inline FixedArray* GetPrototypeTransitions(); - MUST_USE_RESULT inline MaybeObject* SetPrototypeTransitions( - FixedArray* prototype_transitions); inline bool HasPrototypeTransitions(); - inline HeapObject* UncheckedPrototypeTransitions(); - inline TransitionArray* unchecked_transition_array(); - static const int kProtoTransitionHeaderSize = 1; static const int kProtoTransitionNumberOfEntriesOffset = 0; static const int kProtoTransitionElementsPerEntry = 2; @@ -6213,60 +6296,59 @@ class Map: public HeapObject { // is found by re-transitioning from the root of the transition tree using the // descriptor array of the map. Returns NULL if no updated map is found. // This method also applies any pending migrations along the prototype chain. - static Handle<Map> CurrentMapForDeprecated(Handle<Map> map); + static MaybeHandle<Map> CurrentMapForDeprecated(Handle<Map> map) + V8_WARN_UNUSED_RESULT; // Same as above, but does not touch the prototype chain. 
- static Handle<Map> CurrentMapForDeprecatedInternal(Handle<Map> map); + static MaybeHandle<Map> CurrentMapForDeprecatedInternal(Handle<Map> map) + V8_WARN_UNUSED_RESULT; - static Handle<Map> RawCopy(Handle<Map> map, int instance_size); - MUST_USE_RESULT MaybeObject* RawCopy(int instance_size); - MUST_USE_RESULT MaybeObject* CopyWithPreallocatedFieldDescriptors(); static Handle<Map> CopyDropDescriptors(Handle<Map> map); - MUST_USE_RESULT MaybeObject* CopyDropDescriptors(); - static Handle<Map> CopyReplaceDescriptors(Handle<Map> map, - Handle<DescriptorArray> descriptors, - TransitionFlag flag, - Handle<Name> name); - MUST_USE_RESULT MaybeObject* CopyReplaceDescriptors( - DescriptorArray* descriptors, - TransitionFlag flag, - Name* name = NULL, - SimpleTransitionFlag simple_flag = FULL_TRANSITION); - static Handle<Map> CopyInstallDescriptors( + static Handle<Map> CopyInsertDescriptor(Handle<Map> map, + Descriptor* descriptor, + TransitionFlag flag); + static Handle<Map> CopyReplaceDescriptor(Handle<Map> map, + Handle<DescriptorArray> descriptors, + Descriptor* descriptor, + int index, + TransitionFlag flag); + + MUST_USE_RESULT static MaybeHandle<Map> CopyWithField( Handle<Map> map, - int new_descriptor, - Handle<DescriptorArray> descriptors); - MUST_USE_RESULT MaybeObject* ShareDescriptor(DescriptorArray* descriptors, - Descriptor* descriptor); - MUST_USE_RESULT MaybeObject* CopyAddDescriptor(Descriptor* descriptor, - TransitionFlag flag); - MUST_USE_RESULT MaybeObject* CopyInsertDescriptor(Descriptor* descriptor, - TransitionFlag flag); - MUST_USE_RESULT MaybeObject* CopyReplaceDescriptor( - DescriptorArray* descriptors, - Descriptor* descriptor, - int index, + Handle<Name> name, + Handle<HeapType> type, + PropertyAttributes attributes, + Representation representation, + TransitionFlag flag); + + MUST_USE_RESULT static MaybeHandle<Map> CopyWithConstant( + Handle<Map> map, + Handle<Name> name, + Handle<Object> constant, + PropertyAttributes attributes, TransitionFlag flag); - MUST_USE_RESULT MaybeObject* AsElementsKind(ElementsKind kind); + // Returns a new map with all transitions dropped from the given map and + // the ElementsKind set. + static Handle<Map> TransitionElementsTo(Handle<Map> map, + ElementsKind to_kind); static Handle<Map> AsElementsKind(Handle<Map> map, ElementsKind kind); - MUST_USE_RESULT MaybeObject* CopyAsElementsKind(ElementsKind kind, - TransitionFlag flag); + static Handle<Map> CopyAsElementsKind(Handle<Map> map, + ElementsKind kind, + TransitionFlag flag); static Handle<Map> CopyForObserved(Handle<Map> map); - static Handle<Map> CopyNormalized(Handle<Map> map, - PropertyNormalizationMode mode, - NormalizedMapSharingMode sharing); + static Handle<Map> CopyForFreeze(Handle<Map> map); - inline void AppendDescriptor(Descriptor* desc, - const DescriptorArray::WhitenessWitness&); + inline void AppendDescriptor(Descriptor* desc); // Returns a copy of the map, with all transitions dropped from the // instance descriptors. static Handle<Map> Copy(Handle<Map> map); - MUST_USE_RESULT MaybeObject* Copy(); + static Handle<Map> Create(Handle<JSFunction> constructor, + int extra_inobject_properties); // Returns the next free property index (only valid for FAST MODE). int NextFreePropertyIndex(); @@ -6286,9 +6368,6 @@ class Map: public HeapObject { // Casting. static inline Map* cast(Object* obj); - // Locate an accessor in the instance descriptor. - AccessorDescriptor* FindAccessor(Name* name); - // Code cache operations. // Clears the code cache. 
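The Map functions above (CopyWithField, CopyWithConstant and the transition helpers) maintain V8's hidden-class transition tree, in which adding a property either reuses an existing transition to a child map or creates one, so that objects built the same way end up sharing a map. The toy model below sketches only that idea; ToyMap and its fields are invented and do not reflect the real descriptor or transition arrays.

#include <map>
#include <memory>
#include <string>
#include <vector>

struct ToyMap {
  std::vector<std::string> properties;                        // descriptor-array analogue
  std::map<std::string, std::shared_ptr<ToyMap>> transitions; // property name -> child map

  static std::shared_ptr<ToyMap> CopyWithField(
      const std::shared_ptr<ToyMap>& map, const std::string& name) {
    auto it = map->transitions.find(name);
    if (it != map->transitions.end()) return it->second;      // reuse the transition
    auto child = std::make_shared<ToyMap>(*map);
    child->transitions.clear();
    child->properties.push_back(name);
    map->transitions[name] = child;                           // record the transition
    return child;
  }
};

int main() {
  auto root = std::make_shared<ToyMap>();
  auto a = ToyMap::CopyWithField(root, "x");
  auto b = ToyMap::CopyWithField(root, "x");
  return a == b ? 0 : 1;   // two objects gaining "x" share the same child map
}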
@@ -6298,7 +6377,6 @@ class Map: public HeapObject { static void UpdateCodeCache(Handle<Map> map, Handle<Name> name, Handle<Code> code); - MUST_USE_RESULT MaybeObject* UpdateCodeCache(Name* name, Code* code); // Extend the descriptor array of the map with the list of descriptors. // In case of duplicates, the latest descriptor is used. @@ -6325,14 +6403,6 @@ class Map: public HeapObject { // Computes a hash value for this map, to be used in HashTables and such. int Hash(); - bool EquivalentToForTransition(Map* other); - - // Compares this map to another to see if they describe equivalent objects. - // If |mode| is set to CLEAR_INOBJECT_PROPERTIES, |other| is treated as if - // it had exactly zero inobject properties. - // The "shared" flags of both this map and |other| are ignored. - bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode); - // Returns the map that this map transitions to if its elements_kind // is changed to |elements_kind|, or NULL if no such map is cached yet. // |safe_to_add_transitions| is set to false if adding transitions is not @@ -6345,15 +6415,6 @@ class Map: public HeapObject { Handle<Map> FindTransitionedMap(MapHandleList* candidates); Map* FindTransitionedMap(MapList* candidates); - // Zaps the contents of backing data structures. Note that the - // heap verifier (i.e. VerifyMarkingVisitor) relies on zapping of objects - // holding weak references when incremental marking is used, because it also - // iterates over objects that are otherwise unreachable. - // In general we only want to call these functions in release mode when - // heap verification is turned on. - void ZapPrototypeTransitions(); - void ZapTransitions(); - bool CanTransition() { // Only JSObject and subtypes have map transitions and back pointers. STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE); @@ -6374,18 +6435,17 @@ class Map: public HeapObject { return type == JS_GLOBAL_OBJECT_TYPE || type == JS_BUILTINS_OBJECT_TYPE; } - // Fires when the layout of an object with a leaf map changes. - // This includes adding transitions to the leaf map or changing - // the descriptor array. - inline void NotifyLeafMapLayoutChange(); - inline bool CanOmitMapChecks(); - void AddDependentCompilationInfo(DependentCode::DependencyGroup group, - CompilationInfo* info); + static void AddDependentCompilationInfo(Handle<Map> map, + DependentCode::DependencyGroup group, + CompilationInfo* info); - void AddDependentCode(DependentCode::DependencyGroup group, - Handle<Code> code); + static void AddDependentCode(Handle<Map> map, + DependentCode::DependencyGroup group, + Handle<Code> code); + static void AddDependentIC(Handle<Map> map, + Handle<Code> stub); bool IsMapInArrayPrototypeChain(); @@ -6413,11 +6473,8 @@ class Map: public HeapObject { // transitions are in the form of a map where the keys are prototype objects // and the values are the maps the are transitioned to. static const int kMaxCachedPrototypeTransitions = 256; - static Handle<Map> GetPrototypeTransition(Handle<Map> map, - Handle<Object> prototype); - static Handle<Map> PutPrototypeTransition(Handle<Map> map, - Handle<Object> prototype, - Handle<Map> target_map); + static Handle<Map> TransitionToPrototype(Handle<Map> map, + Handle<Object> prototype); static const int kMaxPreAllocatedPropertyFields = 255; @@ -6501,7 +6558,81 @@ class Map: public HeapObject { kPointerFieldsEndOffset, kSize> BodyDescriptor; + // Compares this map to another to see if they describe equivalent objects. 
+ // If |mode| is set to CLEAR_INOBJECT_PROPERTIES, |other| is treated as if + // it had exactly zero inobject properties. + // The "shared" flags of both this map and |other| are ignored. + bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode); + private: + bool EquivalentToForTransition(Map* other); + static Handle<Map> RawCopy(Handle<Map> map, int instance_size); + static Handle<Map> ShareDescriptor(Handle<Map> map, + Handle<DescriptorArray> descriptors, + Descriptor* descriptor); + static Handle<Map> CopyInstallDescriptors( + Handle<Map> map, + int new_descriptor, + Handle<DescriptorArray> descriptors); + static Handle<Map> CopyAddDescriptor(Handle<Map> map, + Descriptor* descriptor, + TransitionFlag flag); + static Handle<Map> CopyReplaceDescriptors( + Handle<Map> map, + Handle<DescriptorArray> descriptors, + TransitionFlag flag, + MaybeHandle<Name> maybe_name, + SimpleTransitionFlag simple_flag = FULL_TRANSITION); + + static Handle<Map> CopyNormalized(Handle<Map> map, + PropertyNormalizationMode mode, + NormalizedMapSharingMode sharing); + + // Fires when the layout of an object with a leaf map changes. + // This includes adding transitions to the leaf map or changing + // the descriptor array. + inline void NotifyLeafMapLayoutChange(); + + static Handle<Map> TransitionElementsToSlow(Handle<Map> object, + ElementsKind to_kind); + + // Zaps the contents of backing data structures. Note that the + // heap verifier (i.e. VerifyMarkingVisitor) relies on zapping of objects + // holding weak references when incremental marking is used, because it also + // iterates over objects that are otherwise unreachable. + // In general we only want to call these functions in release mode when + // heap verification is turned on. + void ZapPrototypeTransitions(); + void ZapTransitions(); + + void DeprecateTransitionTree(); + void DeprecateTarget(Name* key, DescriptorArray* new_descriptors); + + Map* FindLastMatchMap(int verbatim, int length, DescriptorArray* descriptors); + + void UpdateDescriptor(int descriptor_number, Descriptor* desc); + + void PrintGeneralization(FILE* file, + const char* reason, + int modify_index, + int split, + int descriptors, + bool constant_to_field, + Representation old_representation, + Representation new_representation, + HeapType* old_field_type, + HeapType* new_field_type); + + static inline void SetPrototypeTransitions( + Handle<Map> map, + Handle<FixedArray> prototype_transitions); + + static Handle<Map> GetPrototypeTransition(Handle<Map> map, + Handle<Object> prototype); + static Handle<Map> PutPrototypeTransition(Handle<Map> map, + Handle<Object> prototype, + Handle<Map> target_map); + DISALLOW_IMPLICIT_CONSTRUCTORS(Map); }; @@ -6619,6 +6750,22 @@ class Script: public Struct { // resource is accessible. Otherwise, always return true. inline bool HasValidSource(); + // Convert code position into column number. + static int GetColumnNumber(Handle<Script> script, int code_pos); + + // Convert code position into (zero-based) line number. + // The non-handlified version does not allocate, but may be much slower. + static int GetLineNumber(Handle<Script> script, int code_pos); + int GetLineNumber(int code_pos); + + static Handle<Object> GetNameOrSourceURL(Handle<Script> script); + + // Init line_ends array with code positions of line ends inside script source. + static void InitLineEnds(Handle<Script> script); + + // Get the JS object wrapping the given script; create it if none exists. 
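Script::GetLineNumber above is backed by the line_ends array that InitLineEnds fills in: once the positions of all line terminators are known, translating a code position into a line number is a binary search. A minimal standalone sketch of that idea (function names here are invented, not V8's internals):

#include <algorithm>
#include <cassert>
#include <string>
#include <vector>

std::vector<int> ComputeLineEnds(const std::string& source) {
  std::vector<int> ends;
  for (int i = 0; i < static_cast<int>(source.size()); ++i)
    if (source[i] == '\n') ends.push_back(i);
  ends.push_back(static_cast<int>(source.size()));  // end of the final line
  return ends;
}

int GetLineNumber(const std::vector<int>& line_ends, int pos) {
  // The first line end at or after |pos| gives the zero-based line number.
  return static_cast<int>(
      std::lower_bound(line_ends.begin(), line_ends.end(), pos) -
      line_ends.begin());
}

int main() {
  std::vector<int> ends = ComputeLineEnds("var a;\nvar b;\nvar c;");
  assert(GetLineNumber(ends, 0) == 0);   // inside "var a;"
  assert(GetLineNumber(ends, 8) == 1);   // inside "var b;"
  return 0;
}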
+ static Handle<JSObject> GetWrapper(Handle<Script> script); + // Dispatched behavior. DECLARE_PRINTER(Script) DECLARE_VERIFIER(Script) @@ -6640,6 +6787,8 @@ class Script: public Struct { static const int kSize = kFlagsOffset + kPointerSize; private: + int GetLineNumberWithArray(int code_pos); + // Bit positions in the flags field. static const int kCompilationTypeBit = 0; static const int kCompilationStateBit = 1; @@ -6723,14 +6872,12 @@ class SharedFunctionInfo: public HeapObject { // Removed a specific optimized code object from the optimized code map. void EvictFromOptimizedCodeMap(Code* optimized_code, const char* reason); + void ClearTypeFeedbackInfo(); + // Trims the optimized code map after entries have been removed. void TrimOptimizedCodeMap(int shrink_by); // Add a new entry to the optimized code map. - MUST_USE_RESULT MaybeObject* AddToOptimizedCodeMap(Context* native_context, - Code* code, - FixedArray* literals, - BailoutId osr_ast_id); static void AddToOptimizedCodeMap(Handle<SharedFunctionInfo> shared, Handle<Context> native_context, Handle<Code> code, @@ -6831,6 +6978,12 @@ class SharedFunctionInfo: public HeapObject { inline int construction_count(); inline void set_construction_count(int value); + // [feedback_vector] - accumulates ast node feedback from full-codegen and + // (increasingly) from crankshafted code where sufficient feedback isn't + // available. Currently the field is duplicated in + // TypeFeedbackInfo::feedback_vector, but the allocation is done here. + DECL_ACCESSORS(feedback_vector, FixedArray) + // [initial_map]: initial map of the first function called as a constructor. // Saved for the duration of the tracking phase. // This is a weak link (GC resets it to undefined_value if no other live @@ -6940,6 +7093,7 @@ class SharedFunctionInfo: public HeapObject { inline void set_ast_node_count(int count); inline int profiler_ticks(); + inline void set_profiler_ticks(int ticks); // Inline cache age is used to infer whether the function survived a context // disposal or not. In the former case we reset the opt_count. @@ -7111,16 +7265,14 @@ class SharedFunctionInfo: public HeapObject { static const int kScriptOffset = kFunctionDataOffset + kPointerSize; static const int kDebugInfoOffset = kScriptOffset + kPointerSize; static const int kInferredNameOffset = kDebugInfoOffset + kPointerSize; - static const int kInitialMapOffset = + static const int kFeedbackVectorOffset = kInferredNameOffset + kPointerSize; - // ast_node_count is a Smi field. It could be grouped with another Smi field - // into a PSEUDO_SMI_ACCESSORS pair (on x64), if one becomes available. - static const int kAstNodeCountOffset = - kInitialMapOffset + kPointerSize; + static const int kInitialMapOffset = + kFeedbackVectorOffset + kPointerSize; #if V8_HOST_ARCH_32_BIT // Smi fields. static const int kLengthOffset = - kAstNodeCountOffset + kPointerSize; + kInitialMapOffset + kPointerSize; static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize; static const int kExpectedNofPropertiesOffset = kFormalParameterCountOffset + kPointerSize; @@ -7138,9 +7290,13 @@ class SharedFunctionInfo: public HeapObject { kCompilerHintsOffset + kPointerSize; static const int kCountersOffset = kOptCountAndBailoutReasonOffset + kPointerSize; + static const int kAstNodeCountOffset = + kCountersOffset + kPointerSize; + static const int kProfilerTicksOffset = + kAstNodeCountOffset + kPointerSize; // Total size. 
- static const int kSize = kCountersOffset + kPointerSize; + static const int kSize = kProfilerTicksOffset + kPointerSize; #else // The only reason to use smi fields instead of int fields // is to allow iteration without maps decoding during @@ -7152,7 +7308,7 @@ class SharedFunctionInfo: public HeapObject { // word is not set and thus this word cannot be treated as pointer // to HeapObject during old space traversal. static const int kLengthOffset = - kAstNodeCountOffset + kPointerSize; + kInitialMapOffset + kPointerSize; static const int kFormalParameterCountOffset = kLengthOffset + kIntSize; @@ -7173,21 +7329,25 @@ class SharedFunctionInfo: public HeapObject { static const int kOptCountAndBailoutReasonOffset = kCompilerHintsOffset + kIntSize; - static const int kCountersOffset = kOptCountAndBailoutReasonOffset + kIntSize; + static const int kAstNodeCountOffset = + kCountersOffset + kIntSize; + static const int kProfilerTicksOffset = + kAstNodeCountOffset + kIntSize; + // Total size. - static const int kSize = kCountersOffset + kIntSize; + static const int kSize = kProfilerTicksOffset + kIntSize; #endif // The construction counter for inobject slack tracking is stored in the // most significant byte of compiler_hints which is otherwise unused. // Its offset depends on the endian-ness of the architecture. -#if __BYTE_ORDER == __LITTLE_ENDIAN +#if defined(V8_TARGET_LITTLE_ENDIAN) static const int kConstructionCountOffset = kCompilerHintsOffset + 3; -#elif __BYTE_ORDER == __BIG_ENDIAN +#elif defined(V8_TARGET_BIG_ENDIAN) static const int kConstructionCountOffset = kCompilerHintsOffset + 0; #else #error Unknown byte ordering @@ -7261,12 +7421,12 @@ class SharedFunctionInfo: public HeapObject { static const int kNativeBitWithinByte = (kNative + kCompilerHintsSmiTagSize) % kBitsPerByte; -#if __BYTE_ORDER == __LITTLE_ENDIAN +#if defined(V8_TARGET_LITTLE_ENDIAN) static const int kStrictModeByteOffset = kCompilerHintsOffset + (kStrictModeFunction + kCompilerHintsSmiTagSize) / kBitsPerByte; static const int kNativeByteOffset = kCompilerHintsOffset + (kNative + kCompilerHintsSmiTagSize) / kBitsPerByte; -#elif __BYTE_ORDER == __BIG_ENDIAN +#elif defined(V8_TARGET_BIG_ENDIAN) static const int kStrictModeByteOffset = kCompilerHintsOffset + (kCompilerHintsSize - 1) - ((kStrictModeFunction + kCompilerHintsSmiTagSize) / kBitsPerByte); @@ -7300,6 +7460,7 @@ class JSGeneratorObject: public JSObject { // cannot be resumed. inline int continuation(); inline void set_continuation(int continuation); + inline bool is_suspended(); // [operand_stack]: Saved operand stack. DECL_ACCESSORS(operand_stack, FixedArray) @@ -7334,7 +7495,7 @@ class JSGeneratorObject: public JSObject { enum ResumeMode { NEXT, THROW }; // Yielding from a generator returns an object with the following inobject - // properties. See Context::generator_result_map() for the map. + // properties. See Context::iterator_result_map() for the map. static const int kResultValuePropertyIndex = 0; static const int kResultDonePropertyIndex = 1; static const int kResultPropertyCount = 2; @@ -7467,7 +7628,7 @@ class JSFunction: public JSObject { // After prototype is removed, it will not be created when accessed, and // [[Construct]] from this function will not be allowed. - void RemovePrototype(); + bool RemovePrototype(); inline bool should_have_prototype(); // Accessor for this function's initial map's [[class]] @@ -7554,6 +7715,9 @@ class JSGlobalProxy : public JSObject { // It is null value if this object is not used by any context. 
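The offset constants being reshuffled above (kFeedbackVectorOffset, kAstNodeCountOffset, kProfilerTicksOffset) follow the usual chained-layout scheme: each field's offset is defined relative to the previous one, so inserting a field automatically shifts everything behind it, including the final kSize. A tiny invented illustration of that scheme, not the actual SharedFunctionInfo layout:

#include <cstdio>

struct LayoutSketch {
  static const int kPointerSize = 8;
  static const int kHeaderSize = 8;
  static const int kNameOffset = kHeaderSize;
  static const int kCodeOffset = kNameOffset + kPointerSize;
  static const int kFeedbackVectorOffset = kCodeOffset + kPointerSize;  // newly inserted field
  static const int kInitialMapOffset = kFeedbackVectorOffset + kPointerSize;
  static const int kSize = kInitialMapOffset + kPointerSize;            // shifts automatically
};

int main() {
  std::printf("object size: %d bytes\n", LayoutSketch::kSize);  // prints 40
  return 0;
}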
DECL_ACCESSORS(native_context, Object) + // [hash]: The hash code property (undefined if not initialized yet). + DECL_ACCESSORS(hash, Object) + // Casting. static inline JSGlobalProxy* cast(Object* obj); @@ -7565,7 +7729,8 @@ class JSGlobalProxy : public JSObject { // Layout description. static const int kNativeContextOffset = JSObject::kHeaderSize; - static const int kSize = kNativeContextOffset + kPointerSize; + static const int kHashOffset = kNativeContextOffset + kPointerSize; + static const int kSize = kHashOffset + kPointerSize; private: DISALLOW_IMPLICIT_CONSTRUCTORS(JSGlobalProxy); @@ -7594,15 +7759,6 @@ class GlobalObject: public JSObject { // Retrieve the property cell used to store a property. PropertyCell* GetPropertyCell(LookupResult* result); - // This is like GetProperty, but is used when you know the lookup won't fail - // by throwing an exception. This is for the debug and builtins global - // objects, where it is known which properties can be expected to be present - // on the object. - Object* GetPropertyNoExceptionThrown(Name* key) { - Object* answer = GetProperty(key)->ToObjectUnchecked(); - return answer; - } - // Casting. static inline GlobalObject* cast(Object* obj); @@ -8003,38 +8159,32 @@ class CompilationCacheShape : public BaseShape<HashTableKey*> { return key->HashForObject(object); } - MUST_USE_RESULT static MaybeObject* AsObject(Heap* heap, - HashTableKey* key) { - return key->AsObject(heap); - } + static inline Handle<Object> AsHandle(Isolate* isolate, HashTableKey* key); static const int kPrefixSize = 0; static const int kEntrySize = 2; }; -class CompilationCacheTable: public HashTable<CompilationCacheShape, +class CompilationCacheTable: public HashTable<CompilationCacheTable, + CompilationCacheShape, HashTableKey*> { public: // Find cached value for a string key, otherwise return null. - Object* Lookup(String* src, Context* context); - Object* LookupEval(String* src, - Context* context, - StrictMode strict_mode, - int scope_position); - Object* LookupRegExp(String* source, JSRegExp::Flags flags); - MUST_USE_RESULT MaybeObject* Put(String* src, - Context* context, - Object* value); - MUST_USE_RESULT MaybeObject* PutEval(String* src, - Context* context, - SharedFunctionInfo* value, - int scope_position); - MUST_USE_RESULT MaybeObject* PutRegExp(String* src, - JSRegExp::Flags flags, - FixedArray* value); - - // Remove given value from cache. + Handle<Object> Lookup(Handle<String> src, Handle<Context> context); + Handle<Object> LookupEval(Handle<String> src, Handle<Context> context, + StrictMode strict_mode, int scope_position); + Handle<Object> LookupRegExp(Handle<String> source, JSRegExp::Flags flags); + static Handle<CompilationCacheTable> Put( + Handle<CompilationCacheTable> cache, Handle<String> src, + Handle<Context> context, Handle<Object> value); + static Handle<CompilationCacheTable> PutEval( + Handle<CompilationCacheTable> cache, Handle<String> src, + Handle<Context> context, Handle<SharedFunctionInfo> value, + int scope_position); + static Handle<CompilationCacheTable> PutRegExp( + Handle<CompilationCacheTable> cache, Handle<String> src, + JSRegExp::Flags flags, Handle<FixedArray> value); void Remove(Object* value); static inline CompilationCacheTable* cast(Object* obj); @@ -8050,7 +8200,8 @@ class CodeCache: public Struct { DECL_ACCESSORS(normal_type_cache, Object) // Add the code object to the cache. 
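As the LookupEval/PutEval signatures above show, the compilation cache keys eval'd code on more than the source text: the context, strict mode and scope position all take part in the lookup. A toy composite-key cache sketch follows; EvalCacheSketch and the integer ids are invented, and the context component is omitted for brevity.

#include <cassert>
#include <map>
#include <string>
#include <tuple>

struct EvalCacheSketch {
  // (source, strict_mode, scope_position) -> id of the cached compilation
  std::map<std::tuple<std::string, bool, int>, int> entries;

  void Put(const std::string& src, bool strict, int scope_position, int id) {
    entries[std::make_tuple(src, strict, scope_position)] = id;
  }

  int Lookup(const std::string& src, bool strict, int scope_position) const {
    auto it = entries.find(std::make_tuple(src, strict, scope_position));
    return it == entries.end() ? -1 : it->second;
  }
};

int main() {
  EvalCacheSketch cache;
  cache.Put("x+1", /*strict=*/false, /*scope_position=*/0, 7);
  assert(cache.Lookup("x+1", false, 0) == 7);
  assert(cache.Lookup("x+1", true, 0) == -1);   // different strictness misses
  return 0;
}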
- MUST_USE_RESULT MaybeObject* Update(Name* name, Code* code); + static void Update( + Handle<CodeCache> cache, Handle<Name> name, Handle<Code> code); // Lookup code object in the cache. Returns code object if found and undefined // if not. @@ -8077,8 +8228,10 @@ class CodeCache: public Struct { static const int kSize = kNormalTypeCacheOffset + kPointerSize; private: - MUST_USE_RESULT MaybeObject* UpdateDefaultCache(Name* name, Code* code); - MUST_USE_RESULT MaybeObject* UpdateNormalTypeCache(Name* name, Code* code); + static void UpdateDefaultCache( + Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code); + static void UpdateNormalTypeCache( + Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code); Object* LookupDefaultCache(Name* name, Code::Flags flags); Object* LookupNormalTypeCache(Name* name, Code::Flags flags); @@ -8106,21 +8259,22 @@ class CodeCacheHashTableShape : public BaseShape<HashTableKey*> { return key->HashForObject(object); } - MUST_USE_RESULT static MaybeObject* AsObject(Heap* heap, - HashTableKey* key) { - return key->AsObject(heap); - } + static inline Handle<Object> AsHandle(Isolate* isolate, HashTableKey* key); static const int kPrefixSize = 0; static const int kEntrySize = 2; }; -class CodeCacheHashTable: public HashTable<CodeCacheHashTableShape, +class CodeCacheHashTable: public HashTable<CodeCacheHashTable, + CodeCacheHashTableShape, HashTableKey*> { public: Object* Lookup(Name* name, Code::Flags flags); - MUST_USE_RESULT MaybeObject* Put(Name* name, Code* code); + static Handle<CodeCacheHashTable> Put( + Handle<CodeCacheHashTable> table, + Handle<Name> name, + Handle<Code> code); int GetIndex(Name* name, Code::Flags flags); void RemoveByIndex(int index); @@ -8144,9 +8298,6 @@ class PolymorphicCodeCache: public Struct { Code::Flags flags, Handle<Code> code); - MUST_USE_RESULT MaybeObject* Update(MapHandleList* maps, - Code::Flags flags, - Code* code); // Returns an undefined value if the entry is not found. Handle<Object> Lookup(MapHandleList* maps, Code::Flags flags); @@ -8166,13 +8317,17 @@ class PolymorphicCodeCache: public Struct { class PolymorphicCodeCacheHashTable - : public HashTable<CodeCacheHashTableShape, HashTableKey*> { + : public HashTable<PolymorphicCodeCacheHashTable, + CodeCacheHashTableShape, + HashTableKey*> { public: Object* Lookup(MapHandleList* maps, int code_kind); - MUST_USE_RESULT MaybeObject* Put(MapHandleList* maps, - int code_kind, - Code* code); + static Handle<PolymorphicCodeCacheHashTable> Put( + Handle<PolymorphicCodeCacheHashTable> hash_table, + MapHandleList* maps, + int code_kind, + Handle<Code> code); static inline PolymorphicCodeCacheHashTable* cast(Object* obj); @@ -8198,7 +8353,6 @@ class TypeFeedbackInfo: public Struct { inline void set_inlined_type_change_checksum(int checksum); inline bool matches_inlined_type_change_checksum(int checksum); - DECL_ACCESSORS(feedback_vector, FixedArray) static inline TypeFeedbackInfo* cast(Object* obj); @@ -8208,10 +8362,9 @@ class TypeFeedbackInfo: public Struct { static const int kStorage1Offset = HeapObject::kHeaderSize; static const int kStorage2Offset = kStorage1Offset + kPointerSize; - static const int kFeedbackVectorOffset = - kStorage2Offset + kPointerSize; - static const int kSize = kFeedbackVectorOffset + kPointerSize; + static const int kSize = kStorage2Offset + kPointerSize; + // TODO(mvstanton): move these sentinel declarations to shared function info. // The object that indicates an uninitialized cache. 
static inline Handle<Object> UninitializedSentinel(Isolate* isolate); @@ -8227,9 +8380,6 @@ class TypeFeedbackInfo: public Struct { // garbage collection (e.g., for patching the cache). static inline Object* RawUninitializedSentinel(Heap* heap); - static const int kForInFastCaseMarker = 0; - static const int kForInSlowCaseMarker = 1; - private: static const int kTypeChangeChecksumBits = 7; @@ -8614,6 +8764,7 @@ class Name: public HeapObject { // Equality operations. inline bool Equals(Name* other); + inline static bool Equals(Handle<Name> one, Handle<Name> two); // Conversion. inline bool AsArrayIndex(uint32_t* index); @@ -8750,28 +8901,37 @@ class String: public Name { // true. Vector<const uint8_t> ToOneByteVector() { ASSERT_EQ(ASCII, state_); - return buffer_; + return Vector<const uint8_t>(onebyte_start, length_); } // Return the two-byte content of the string. Only use if IsTwoByte() // returns true. Vector<const uc16> ToUC16Vector() { ASSERT_EQ(TWO_BYTE, state_); - return Vector<const uc16>::cast(buffer_); + return Vector<const uc16>(twobyte_start, length_); + } + + uc16 Get(int i) { + ASSERT(i < length_); + ASSERT(state_ != NON_FLAT); + if (state_ == ASCII) return onebyte_start[i]; + return twobyte_start[i]; } private: enum State { NON_FLAT, ASCII, TWO_BYTE }; // Constructors only used by String::GetFlatContent(). - explicit FlatContent(Vector<const uint8_t> chars) - : buffer_(chars), - state_(ASCII) { } - explicit FlatContent(Vector<const uc16> chars) - : buffer_(Vector<const byte>::cast(chars)), - state_(TWO_BYTE) { } - FlatContent() : buffer_(), state_(NON_FLAT) { } - - Vector<const uint8_t> buffer_; + explicit FlatContent(const uint8_t* start, int length) + : onebyte_start(start), length_(length), state_(ASCII) { } + explicit FlatContent(const uc16* start, int length) + : twobyte_start(start), length_(length), state_(TWO_BYTE) { } + FlatContent() : onebyte_start(NULL), length_(0), state_(NON_FLAT) { } + + union { + const uint8_t* onebyte_start; + const uc16* twobyte_start; + }; + int length_; State state_; friend class String; @@ -8781,6 +8941,11 @@ class String: public Name { inline int length(); inline void set_length(int value); + // Get and set the length of the string using acquire loads and release + // stores. + inline int synchronized_length(); + inline void synchronized_set_length(int value); + // Returns whether this string has only ASCII chars, i.e. all of them can // be ASCII encoded. This might be the case even if the string is // two-byte. Such strings may appear when the embedder prefers @@ -8804,7 +8969,7 @@ class String: public Name { // to this method are not efficient unless the string is flat. INLINE(uint16_t Get(int index)); - // Try to flatten the string. Checks first inline to see if it is + // Flattens the string. Checks first inline to see if it is // necessary. Does nothing if the string is not a cons string. // Flattening allocates a sequential string with the same data as // the given string and mutates the cons string to a degenerate @@ -8816,15 +8981,9 @@ class String: public Name { // // Degenerate cons strings are handled specially by the garbage // collector (see IsShortcutCandidate). - // - // Use FlattenString from Handles.cc to flatten even in case an - // allocation failure happens. - inline MaybeObject* TryFlatten(PretenureFlag pretenure = NOT_TENURED); - // Convenience function. Has exactly the same behavior as - // TryFlatten(), except in the case of failure returns the original - // string. 
- inline String* TryFlattenGetString(PretenureFlag pretenure = NOT_TENURED); + static inline Handle<String> Flatten(Handle<String> string, + PretenureFlag pretenure = NOT_TENURED); // Tries to return the content of a flat string as a structure holding either // a flat vector of char or of uc16. @@ -8843,6 +9002,7 @@ class String: public Name { // String equality operations. inline bool Equals(String* other); + inline static bool Equals(Handle<String> one, Handle<String> two); bool IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match = false); bool IsOneByteEqualTo(Vector<const uint8_t> str); bool IsTwoByteEqualTo(Vector<const uc16> str); @@ -8914,6 +9074,7 @@ class String: public Name { static const int32_t kMaxOneByteCharCode = unibrow::Latin1::kMaxChar; static const uint32_t kMaxOneByteCharCodeU = unibrow::Latin1::kMaxChar; static const int kMaxUtf16CodeUnit = 0xffff; + static const uint32_t kMaxUtf16CodeUnitU = kMaxUtf16CodeUnit; // Value of hash field containing computed hash equal to zero. static const int kEmptyStringHash = kIsNotArrayIndexMask; @@ -8986,42 +9147,26 @@ class String: public Name { return NonOneByteStart(chars, length) >= length; } - // TODO(dcarney): Replace all instances of this with VisitFlat. - template<class Visitor, class ConsOp> - static inline void Visit(String* string, - unsigned offset, - Visitor& visitor, - ConsOp& cons_op, - int32_t type, - unsigned length); - template<class Visitor> static inline ConsString* VisitFlat(Visitor* visitor, String* string, - int offset, - int length, - int32_t type); + int offset = 0); - template<class Visitor> - static inline ConsString* VisitFlat(Visitor* visitor, - String* string, - int offset = 0) { - int32_t type = string->map()->instance_type(); - return VisitFlat(visitor, string, offset, string->length(), type); - } + static Handle<FixedArray> CalculateLineEnds(Handle<String> string, + bool include_ending_line); private: friend class Name; - // Try to flatten the top level ConsString that is hiding behind this - // string. This is a no-op unless the string is a ConsString. Flatten - // mutates the ConsString and might return a failure. - MUST_USE_RESULT MaybeObject* SlowTryFlatten(PretenureFlag pretenure); + static Handle<String> SlowFlatten(Handle<ConsString> cons, + PretenureFlag tenure); // Slow case of String::Equals. This implementation works on any strings // but it is most efficient on strings that are almost flat. bool SlowEquals(String* other); + static bool SlowEquals(Handle<String> one, Handle<String> two); + // Slow case of AsArrayIndex. bool SlowAsArrayIndex(uint32_t* index); @@ -9391,57 +9536,63 @@ class ConsStringNullOp { // This maintains an off-stack representation of the stack frames required // to traverse a ConsString, allowing an entirely iterative and restartable // traversal of the entire string -// Note: this class is not GC-safe. class ConsStringIteratorOp { public: inline ConsStringIteratorOp() {} - String* Operate(String* string, - unsigned* offset_out, - int32_t* type_out, - unsigned* length_out); - inline String* ContinueOperation(int32_t* type_out, unsigned* length_out); - inline void Reset(); - inline bool HasMore(); + inline ConsStringIteratorOp(ConsString* cons_string, int offset = 0) { + Reset(cons_string, offset); + } + inline void Reset(ConsString* cons_string, int offset = 0) { + depth_ = 0; + // Next will always return NULL. + if (cons_string == NULL) return; + Initialize(cons_string, offset); + } + // Returns NULL when complete. 
+ inline String* Next(int* offset_out) { + *offset_out = 0; + if (depth_ == 0) return NULL; + return Continue(offset_out); + } private: - // TODO(dcarney): Templatize this out for different stack sizes. - static const unsigned kStackSize = 32; + static const int kStackSize = 32; // Use a mask instead of doing modulo operations for stack wrapping. - static const unsigned kDepthMask = kStackSize-1; + static const int kDepthMask = kStackSize-1; STATIC_ASSERT(IS_POWER_OF_TWO(kStackSize)); - static inline unsigned OffsetForDepth(unsigned depth); + static inline int OffsetForDepth(int depth); inline void PushLeft(ConsString* string); inline void PushRight(ConsString* string); inline void AdjustMaximumDepth(); inline void Pop(); - String* NextLeaf(bool* blew_stack, int32_t* type_out, unsigned* length_out); - String* Search(unsigned* offset_out, - int32_t* type_out, - unsigned* length_out); + inline bool StackBlown() { return maximum_depth_ - depth_ == kStackSize; } + void Initialize(ConsString* cons_string, int offset); + String* Continue(int* offset_out); + String* NextLeaf(bool* blew_stack); + String* Search(int* offset_out); - unsigned depth_; - unsigned maximum_depth_; // Stack must always contain only frames for which right traversal // has not yet been performed. ConsString* frames_[kStackSize]; - unsigned consumed_; ConsString* root_; + int depth_; + int maximum_depth_; + int consumed_; DISALLOW_COPY_AND_ASSIGN(ConsStringIteratorOp); }; -// Note: this class is not GC-safe. class StringCharacterStream { public: inline StringCharacterStream(String* string, ConsStringIteratorOp* op, - unsigned offset = 0); + int offset = 0); inline uint16_t GetNext(); inline bool HasMore(); - inline void Reset(String* string, unsigned offset = 0); - inline void VisitOneByteString(const uint8_t* chars, unsigned length); - inline void VisitTwoByteString(const uint16_t* chars, unsigned length); + inline void Reset(String* string, int offset = 0); + inline void VisitOneByteString(const uint8_t* chars, int length); + inline void VisitTwoByteString(const uint16_t* chars, int length); private: bool is_one_byte_; @@ -9487,10 +9638,11 @@ class Oddball: public HeapObject { DECLARE_VERIFIER(Oddball) // Initialize the fields. - MUST_USE_RESULT MaybeObject* Initialize(Heap* heap, - const char* to_string, - Object* to_number, - byte kind); + static void Initialize(Isolate* isolate, + Handle<Oddball> oddball, + const char* to_string, + Handle<Object> to_number, + byte kind); // Layout description. static const int kToStringOffset = HeapObject::kHeaderSize; @@ -9507,6 +9659,7 @@ class Oddball: public HeapObject { static const byte kUndefined = 5; static const byte kUninitialized = 6; static const byte kOther = 7; + static const byte kException = 8; typedef FixedBodyDescriptor<kToStringOffset, kToNumberOffset + kPointerSize, @@ -9578,9 +9731,8 @@ class PropertyCell: public Cell { static Handle<HeapType> UpdatedType(Handle<PropertyCell> cell, Handle<Object> value); - void AddDependentCompilationInfo(CompilationInfo* info); - - void AddDependentCode(Handle<Code> code); + static void AddDependentCompilationInfo(Handle<PropertyCell> cell, + CompilationInfo* info); // Casting. static inline PropertyCell* cast(Object* obj); @@ -9623,18 +9775,21 @@ class JSProxy: public JSReceiver { // Casting. 
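The String::FlatContent hunk above swaps the cast-based Vector<const byte> buffer for a tagged union of raw one-byte and two-byte pointers sharing a single length, and adds a Get(i) accessor. A compact standalone model of that layout, with illustrative names rather than the V8 class itself:

    #include <cassert>
    #include <cstdint>

    // Minimal model of a "flat content" view: one pointer, interpreted
    // according to a width tag, plus a shared length.
    class FlatChars {
     public:
      static FlatChars OneByte(const uint8_t* s, int len) { return FlatChars(s, len); }
      static FlatChars TwoByte(const uint16_t* s, int len) { return FlatChars(s, len); }

      uint16_t Get(int i) const {             // analogue of FlatContent::Get
        assert(i >= 0 && i < length_);
        return two_byte_ ? twobyte_start_[i] : onebyte_start_[i];
      }

     private:
      FlatChars(const uint8_t* s, int len)
          : onebyte_start_(s), length_(len), two_byte_(false) {}
      FlatChars(const uint16_t* s, int len)
          : twobyte_start_(s), length_(len), two_byte_(true) {}

      union {
        const uint8_t* onebyte_start_;
        const uint16_t* twobyte_start_;
      };
      int length_;
      bool two_byte_;
    };

Callers that previously cast the shared buffer now read whichever member the state tag selects, which is what the new ToOneByteVector and ToUC16Vector bodies in the hunk above do.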
static inline JSProxy* cast(Object* obj); - MUST_USE_RESULT MaybeObject* GetPropertyWithHandler( - Object* receiver, - Name* name); - MUST_USE_RESULT MaybeObject* GetElementWithHandler( - Object* receiver, + MUST_USE_RESULT static MaybeHandle<Object> GetPropertyWithHandler( + Handle<JSProxy> proxy, + Handle<Object> receiver, + Handle<Name> name); + MUST_USE_RESULT static inline MaybeHandle<Object> GetElementWithHandler( + Handle<JSProxy> proxy, + Handle<Object> receiver, uint32_t index); // If the handler defines an accessor property with a setter, invoke it. // If it defines an accessor property without a setter, or a data property // that is read-only, throw. In all these cases set '*done' to true, // otherwise set it to false. - static Handle<Object> SetPropertyViaPrototypesWithHandler( + MUST_USE_RESULT + static MaybeHandle<Object> SetPropertyViaPrototypesWithHandler( Handle<JSProxy> proxy, Handle<JSReceiver> receiver, Handle<Name> name, @@ -9660,10 +9815,12 @@ class JSProxy: public JSReceiver { // Invoke a trap by name. If the trap does not exist on this's handler, // but derived_trap is non-NULL, invoke that instead. May cause GC. - Handle<Object> CallTrap(const char* name, - Handle<Object> derived_trap, - int argc, - Handle<Object> args[]); + MUST_USE_RESULT static MaybeHandle<Object> CallTrap( + Handle<JSProxy> proxy, + const char* name, + Handle<Object> derived_trap, + int argc, + Handle<Object> args[]); // Dispatched behavior. DECLARE_PRINTER(JSProxy) @@ -9688,27 +9845,32 @@ class JSProxy: public JSReceiver { private: friend class JSReceiver; - static Handle<Object> SetPropertyWithHandler(Handle<JSProxy> proxy, - Handle<JSReceiver> receiver, - Handle<Name> name, - Handle<Object> value, - PropertyAttributes attributes, - StrictMode strict_mode); - static Handle<Object> SetElementWithHandler(Handle<JSProxy> proxy, - Handle<JSReceiver> receiver, - uint32_t index, - Handle<Object> value, - StrictMode strict_mode); + MUST_USE_RESULT static MaybeHandle<Object> SetPropertyWithHandler( + Handle<JSProxy> proxy, + Handle<JSReceiver> receiver, + Handle<Name> name, + Handle<Object> value, + PropertyAttributes attributes, + StrictMode strict_mode); + MUST_USE_RESULT static inline MaybeHandle<Object> SetElementWithHandler( + Handle<JSProxy> proxy, + Handle<JSReceiver> receiver, + uint32_t index, + Handle<Object> value, + StrictMode strict_mode); static bool HasPropertyWithHandler(Handle<JSProxy> proxy, Handle<Name> name); - static bool HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index); + static inline bool HasElementWithHandler(Handle<JSProxy> proxy, + uint32_t index); - static Handle<Object> DeletePropertyWithHandler(Handle<JSProxy> proxy, - Handle<Name> name, - DeleteMode mode); - static Handle<Object> DeleteElementWithHandler(Handle<JSProxy> proxy, - uint32_t index, - DeleteMode mode); + MUST_USE_RESULT static MaybeHandle<Object> DeletePropertyWithHandler( + Handle<JSProxy> proxy, + Handle<Name> name, + DeleteMode mode); + MUST_USE_RESULT static MaybeHandle<Object> DeleteElementWithHandler( + Handle<JSProxy> proxy, + uint32_t index, + DeleteMode mode); MUST_USE_RESULT Object* GetIdentityHash(); @@ -9793,6 +9955,149 @@ class JSMap: public JSObject { }; +// OrderedHashTableIterator is an iterator that iterates over the keys and +// values of an OrderedHashTable. +// +// The hash table has a reference to the iterator and the iterators themselves +// have references to the [next_iterator] and [previous_iterator], thus creating +// a double linked list. 
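The comment above describes how an OrderedHashTable and its live iterators keep mutual references so the table can fix up every iterator when entries move. As a rough standalone model of that registration scheme (illustrative names only, not the V8 classes):

    #include <cstddef>

    struct TableIterator {
      std::size_t index = 0;           // raw slot in the backing data table
      TableIterator* next = nullptr;   // intrusive doubly linked list of live
      TableIterator* prev = nullptr;   // iterators (next_/previous_iterator)
    };

    struct Table {
      TableIterator* live = nullptr;   // head of the live-iterator list

      void Register(TableIterator* it) {        // run when an iterator is created
        it->next = live;
        if (live != nullptr) live->prev = it;
        live = it;
      }

      void EntryRemoved(std::size_t removed) {  // rough analogue of notifying
        for (TableIterator* it = live; it != nullptr; it = it->next) {
          if (it->index > removed) --it->index; // keep later iterators in step
        }
      }
    };

Closing an iterator would simply unlink it from this list, mirroring the Close() method described below.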
+// +// When the hash table changes the iterators are called to update their [index] +// and [count]. The hash table calls [EntryRemoved], [TableCompacted] as well +// as [TableCleared]. +// +// When an iterator is done it closes itself. It removes itself from the double +// linked list and it sets its [table] to undefined, no longer keeping the +// [table] alive. +template<class Derived, class TableType> +class OrderedHashTableIterator: public JSObject { + public: + // [table]: the backing hash table mapping keys to values. + DECL_ACCESSORS(table, Object) + + // [index]: The index into the data table. + DECL_ACCESSORS(index, Smi) + + // [count]: The logical index into the data table, ignoring the holes. + DECL_ACCESSORS(count, Smi) + + // [kind]: The kind of iteration this is. One of the [Kind] enum values. + DECL_ACCESSORS(kind, Smi) + + // [next_iterator]: Used as a double linked list for the live iterators. + DECL_ACCESSORS(next_iterator, Object) + + // [previous_iterator]: Used as a double linked list for the live iterators. + DECL_ACCESSORS(previous_iterator, Object) + +#ifdef OBJECT_PRINT + void OrderedHashTableIteratorPrint(FILE* out); +#endif + + static const int kTableOffset = JSObject::kHeaderSize; + static const int kIndexOffset = kTableOffset + kPointerSize; + static const int kCountOffset = kIndexOffset + kPointerSize; + static const int kKindOffset = kCountOffset + kPointerSize; + static const int kNextIteratorOffset = kKindOffset + kPointerSize; + static const int kPreviousIteratorOffset = kNextIteratorOffset + kPointerSize; + static const int kSize = kPreviousIteratorOffset + kPointerSize; + + enum Kind { + kKindKeys = 1, + kKindValues = 2, + kKindEntries = 3 + }; + + // Called by the underlying [table] when an entry is removed. + void EntryRemoved(int index); + + // Called by the underlying [table] when it is compacted/rehashed. + void TableCompacted() { + // All holes have been removed so index is now same as count. + set_index(count()); + } + + // Called by the underlying [table] when it is cleared. + void TableCleared() { + set_index(Smi::FromInt(0)); + set_count(Smi::FromInt(0)); + } + + // Removes the iterator from the double linked list and removes its reference + // back to the [table]. + void Close(); + + // Returns an iterator result object: {value: any, done: boolean} and moves + // the index to the next valid entry. Closes the iterator if moving past the + // end. + static Handle<JSObject> Next(Handle<Derived> iterator); + + protected: + static Handle<Derived> CreateInternal( + Handle<Map> map, Handle<TableType> table, int kind); + + private: + // Ensures [index] is not pointing to a hole. + void Seek(); + + // Moves [index] to next valid entry. Closes the iterator if moving past the + // end. + void MoveNext(); + + bool Closed() { + return table()->IsUndefined(); + } + + DISALLOW_IMPLICIT_CONSTRUCTORS(OrderedHashTableIterator); +}; + + +class JSSetIterator: public OrderedHashTableIterator<JSSetIterator, + OrderedHashSet> { + public: + // Creates a new iterator associated with [table]. + // [kind] needs to be one of the OrderedHashTableIterator Kind enum values. + static inline Handle<JSSetIterator> Create( + Handle<OrderedHashSet> table, int kind); + + // Dispatched behavior. + DECLARE_PRINTER(JSSetIterator) + DECLARE_VERIFIER(JSSetIterator) + + // Casting. 
+ static inline JSSetIterator* cast(Object* obj); + + static Handle<Object> ValueForKind( + Handle<JSSetIterator> iterator, int entry_index); + + private: + DISALLOW_IMPLICIT_CONSTRUCTORS(JSSetIterator); +}; + + +class JSMapIterator: public OrderedHashTableIterator<JSMapIterator, + OrderedHashMap> { + public: + // Creates a new iterator associated with [table]. + // [kind] needs to be one of the OrderedHashTableIterator Kind enum values. + static inline Handle<JSMapIterator> Create( + Handle<OrderedHashMap> table, int kind); + + // Dispatched behavior. + DECLARE_PRINTER(JSMapIterator) + DECLARE_VERIFIER(JSMapIterator) + + // Casting. + static inline JSMapIterator* cast(Object* obj); + + static Handle<Object> ValueForKind( + Handle<JSMapIterator> iterator, int entry_index); + + private: + DISALLOW_IMPLICIT_CONSTRUCTORS(JSMapIterator); +}; + + // Base class for both JSWeakMap and JSWeakSet class JSWeakCollection: public JSObject { public: @@ -10034,9 +10339,6 @@ class JSArray: public JSObject { uint32_t index, Handle<Object> value); - MUST_USE_RESULT MaybeObject* JSArrayUpdateLengthFromIndex(uint32_t index, - Object* value); - // Initialize the array with the given capacity. The function may // fail due to out-of-memory situations, but only if the requested // capacity is non-zero. @@ -10045,8 +10347,9 @@ class JSArray: public JSObject { // Initializes the array to a certain length. inline bool AllowsSetElementsLength(); // Can cause GC. - static Handle<Object> SetElementsLength(Handle<JSArray> array, - Handle<Object> length); + MUST_USE_RESULT static MaybeHandle<Object> SetElementsLength( + Handle<JSArray> array, + Handle<Object> length); // Set the content of the array to the content of storage. static inline void SetContent(Handle<JSArray> array, @@ -10582,7 +10885,6 @@ class TypeSwitchInfo: public Struct { }; -#ifdef ENABLE_DEBUGGER_SUPPORT // The DebugInfo class holds additional information for a function being // debugged. class DebugInfo: public Struct { @@ -10686,7 +10988,6 @@ class BreakPointInfo: public Struct { private: DISALLOW_IMPLICIT_CONSTRUCTORS(BreakPointInfo); }; -#endif // ENABLE_DEBUGGER_SUPPORT #undef DECL_BOOLEAN_ACCESSORS diff --git a/deps/v8/src/once.cc b/deps/v8/src/once.cc index 37fe369fb..412c66a1d 100644 --- a/deps/v8/src/once.cc +++ b/deps/v8/src/once.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "once.h" diff --git a/deps/v8/src/once.h b/deps/v8/src/once.h index a44b8fafb..938af281c 100644 --- a/deps/v8/src/once.h +++ b/deps/v8/src/once.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // emulates google3/base/once.h // diff --git a/deps/v8/src/optimizing-compiler-thread.cc b/deps/v8/src/optimizing-compiler-thread.cc index fb3eac5d5..f26eb88ac 100644 --- a/deps/v8/src/optimizing-compiler-thread.cc +++ b/deps/v8/src/optimizing-compiler-thread.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "optimizing-compiler-thread.h" diff --git a/deps/v8/src/optimizing-compiler-thread.h b/deps/v8/src/optimizing-compiler-thread.h index eae1f608f..7e2752700 100644 --- a/deps/v8/src/optimizing-compiler-thread.h +++ b/deps/v8/src/optimizing-compiler-thread.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_OPTIMIZING_COMPILER_THREAD_H_ #define V8_OPTIMIZING_COMPILER_THREAD_H_ diff --git a/deps/v8/src/parser.cc b/deps/v8/src/parser.cc index a00adb8c1..f07f37ebb 100644 --- a/deps/v8/src/parser.cc +++ b/deps/v8/src/parser.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -205,26 +182,31 @@ void RegExpBuilder::AddQuantifierToAtom( } -Handle<String> Parser::LookupCachedSymbol(int symbol_id) { - // Make sure the cache is large enough to hold the symbol identifier. - if (symbol_cache_.length() <= symbol_id) { - // Increase length to index + 1. - symbol_cache_.AddBlock(Handle<String>::null(), - symbol_id + 1 - symbol_cache_.length(), zone()); +ScriptData* ScriptData::New(const char* data, int length) { + // The length is obviously invalid. + if (length % sizeof(unsigned) != 0) { + return NULL; } - Handle<String> result = symbol_cache_.at(symbol_id); - if (result.is_null()) { - result = scanner()->AllocateInternalizedString(isolate_); - ASSERT(!result.is_null()); - symbol_cache_.at(symbol_id) = result; - return result; + + int deserialized_data_length = length / sizeof(unsigned); + unsigned* deserialized_data; + bool owns_store = reinterpret_cast<intptr_t>(data) % sizeof(unsigned) != 0; + if (owns_store) { + // Copy the data to align it. + deserialized_data = i::NewArray<unsigned>(deserialized_data_length); + i::CopyBytes(reinterpret_cast<char*>(deserialized_data), + data, static_cast<size_t>(length)); + } else { + // If aligned, don't create a copy of the data. + deserialized_data = reinterpret_cast<unsigned*>(const_cast<char*>(data)); } - isolate()->counters()->total_preparse_symbols_skipped()->Increment(); - return result; + return new ScriptData( + Vector<unsigned>(deserialized_data, deserialized_data_length), + owns_store); } -FunctionEntry ScriptDataImpl::GetFunctionEntry(int start) { +FunctionEntry ScriptData::GetFunctionEntry(int start) { // The current pre-data entry must be a FunctionEntry with the given // start position. if ((function_index_ + FunctionEntry::kSize <= store_.length()) @@ -238,12 +220,12 @@ FunctionEntry ScriptDataImpl::GetFunctionEntry(int start) { } -int ScriptDataImpl::GetSymbolIdentifier() { +int ScriptData::GetSymbolIdentifier() { return ReadNumber(&symbol_data_); } -bool ScriptDataImpl::SanityCheck() { +bool ScriptData::SanityCheck() { // Check that the header data is valid and doesn't specify // point to positions outside the store. 
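The ScriptData::New hunk above reuses the caller's buffer when it is already aligned for unsigned access and only copies otherwise, with owns_store recording which case applied. A minimal standalone sketch of that check, using illustrative helpers rather than V8's NewArray/CopyBytes:

    #include <cstdint>
    #include <cstring>

    // Returns a word-aligned view of 'data'; sets *owns_store when a copy had
    // to be made (the caller is then responsible for delete[]).
    const unsigned* AsAlignedWords(const char* data, int length, bool* owns_store) {
      *owns_store = reinterpret_cast<std::uintptr_t>(data) % sizeof(unsigned) != 0;
      if (!*owns_store) {
        return reinterpret_cast<const unsigned*>(data);   // aligned: no copy
      }
      unsigned* copy = new unsigned[length / sizeof(unsigned)];
      std::memcpy(copy, data, static_cast<std::size_t>(length));
      return copy;
    }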
if (store_.length() < PreparseDataConstants::kHeaderSize) return false; @@ -279,10 +261,6 @@ bool ScriptDataImpl::SanityCheck() { static_cast<int>(store_[PreparseDataConstants::kFunctionsSizeOffset]); if (functions_size < 0) return false; if (functions_size % FunctionEntry::kSize != 0) return false; - // Check that the count of symbols is non-negative. - int symbol_count = - static_cast<int>(store_[PreparseDataConstants::kSymbolCountOffset]); - if (symbol_count < 0) return false; // Check that the total size has room for header and function entries. int minimum_size = PreparseDataConstants::kHeaderSize + functions_size; @@ -292,7 +270,7 @@ bool ScriptDataImpl::SanityCheck() { -const char* ScriptDataImpl::ReadString(unsigned* start, int* chars) { +const char* ScriptData::ReadString(unsigned* start, int* chars) { int length = start[0]; char* result = NewArray<char>(length + 1); for (int i = 0; i < length; i++) { @@ -304,20 +282,25 @@ const char* ScriptDataImpl::ReadString(unsigned* start, int* chars) { } -Scanner::Location ScriptDataImpl::MessageLocation() { +Scanner::Location ScriptData::MessageLocation() const { int beg_pos = Read(PreparseDataConstants::kMessageStartPos); int end_pos = Read(PreparseDataConstants::kMessageEndPos); return Scanner::Location(beg_pos, end_pos); } -const char* ScriptDataImpl::BuildMessage() { +bool ScriptData::IsReferenceError() const { + return Read(PreparseDataConstants::kIsReferenceErrorPos); +} + + +const char* ScriptData::BuildMessage() const { unsigned* start = ReadAddress(PreparseDataConstants::kMessageTextPos); return ReadString(start, NULL); } -Vector<const char*> ScriptDataImpl::BuildArgs() { +Vector<const char*> ScriptData::BuildArgs() const { int arg_count = Read(PreparseDataConstants::kMessageArgCountPos); const char** array = NewArray<const char*>(arg_count); // Position after text found by skipping past length field and @@ -333,12 +316,12 @@ Vector<const char*> ScriptDataImpl::BuildArgs() { } -unsigned ScriptDataImpl::Read(int position) { +unsigned ScriptData::Read(int position) const { return store_[PreparseDataConstants::kHeaderSize + position]; } -unsigned* ScriptDataImpl::ReadAddress(int position) { +unsigned* ScriptData::ReadAddress(int position) const { return &store_[PreparseDataConstants::kHeaderSize + position]; } @@ -418,10 +401,9 @@ class TargetScope BASE_EMBEDDED { // Implementation of Parser bool ParserTraits::IsEvalOrArguments(Handle<String> identifier) const { - return identifier.is_identical_to( - parser_->isolate()->factory()->eval_string()) || - identifier.is_identical_to( - parser_->isolate()->factory()->arguments_string()); + Factory* factory = parser_->isolate()->factory(); + return identifier.is_identical_to(factory->eval_string()) + || identifier.is_identical_to(factory->arguments_string()); } @@ -480,19 +462,6 @@ Expression* ParserTraits::MarkExpressionAsLValue(Expression* expression) { } -void ParserTraits::CheckStrictModeLValue(Expression* expression, - bool* ok) { - VariableProxy* lhs = expression != NULL - ? 
expression->AsVariableProxy() - : NULL; - if (lhs != NULL && !lhs->is_this() && IsEvalOrArguments(lhs->name())) { - parser_->ReportMessage("strict_eval_arguments", - Vector<const char*>::empty()); - *ok = false; - } -} - - bool ParserTraits::ShortcutNumericLiteralBinaryExpression( Expression** x, Expression* y, Token::Value op, int pos, AstNodeFactory<AstConstructionVisitor>* factory) { @@ -599,6 +568,59 @@ Expression* ParserTraits::BuildUnaryExpression( } +Expression* ParserTraits::NewThrowReferenceError(const char* message, int pos) { + return NewThrowError( + parser_->isolate()->factory()->MakeReferenceError_string(), + message, HandleVector<Object>(NULL, 0), pos); +} + + +Expression* ParserTraits::NewThrowSyntaxError( + const char* message, Handle<Object> arg, int pos) { + int argc = arg.is_null() ? 0 : 1; + Vector< Handle<Object> > arguments = HandleVector<Object>(&arg, argc); + return NewThrowError( + parser_->isolate()->factory()->MakeSyntaxError_string(), + message, arguments, pos); +} + + +Expression* ParserTraits::NewThrowTypeError( + const char* message, Handle<Object> arg, int pos) { + int argc = arg.is_null() ? 0 : 1; + Vector< Handle<Object> > arguments = HandleVector<Object>(&arg, argc); + return NewThrowError( + parser_->isolate()->factory()->MakeTypeError_string(), + message, arguments, pos); +} + + +Expression* ParserTraits::NewThrowError( + Handle<String> constructor, const char* message, + Vector<Handle<Object> > arguments, int pos) { + Zone* zone = parser_->zone(); + Factory* factory = parser_->isolate()->factory(); + int argc = arguments.length(); + Handle<FixedArray> elements = factory->NewFixedArray(argc, TENURED); + for (int i = 0; i < argc; i++) { + Handle<Object> element = arguments[i]; + if (!element.is_null()) { + elements->set(i, *element); + } + } + Handle<JSArray> array = + factory->NewJSArrayWithElements(elements, FAST_ELEMENTS, TENURED); + + ZoneList<Expression*>* args = new(zone) ZoneList<Expression*>(2, zone); + Handle<String> type = factory->InternalizeUtf8String(message); + args->Add(parser_->factory()->NewLiteral(type, pos), zone); + args->Add(parser_->factory()->NewLiteral(array, pos), zone); + CallRuntime* call_constructor = + parser_->factory()->NewCallRuntime(constructor, NULL, args, pos); + return parser_->factory()->NewThrow(call_constructor, pos); +} + + void ParserTraits::ReportMessageAt(Scanner::Location source_location, const char* message, Vector<const char*> args, @@ -615,8 +637,8 @@ void ParserTraits::ReportMessageAt(Scanner::Location source_location, Factory* factory = parser_->isolate()->factory(); Handle<FixedArray> elements = factory->NewFixedArray(args.length()); for (int i = 0; i < args.length(); i++) { - Handle<String> arg_string = factory->NewStringFromUtf8(CStrVector(args[i])); - ASSERT(!arg_string.is_null()); + Handle<String> arg_string = + factory->NewStringFromUtf8(CStrVector(args[i])).ToHandleChecked(); elements->set(i, *arg_string); } Handle<JSArray> array = factory->NewJSArrayWithElements(elements); @@ -662,20 +684,8 @@ void ParserTraits::ReportMessageAt(Scanner::Location source_location, Handle<String> ParserTraits::GetSymbol(Scanner* scanner) { - if (parser_->cached_data_mode() == CONSUME_CACHED_DATA) { - int symbol_id = (*parser_->cached_data())->GetSymbolIdentifier(); - // If there is no symbol data, -1 will be returned. 
- if (symbol_id >= 0 && - symbol_id < (*parser_->cached_data())->symbol_count()) { - return parser_->LookupCachedSymbol(symbol_id); - } - } else if (parser_->cached_data_mode() == PRODUCE_CACHED_DATA) { - if (parser_->log_->ShouldLogSymbols()) { - parser_->scanner()->LogSymbol(parser_->log_, parser_->position()); - } - } Handle<String> result = - parser_->scanner()->AllocateInternalizedString(parser_->isolate_); + parser_->scanner()->AllocateInternalizedString(parser_->isolate()); ASSERT(!result.is_null()); return result; } @@ -774,7 +784,6 @@ Parser::Parser(CompilationInfo* info) info->zone(), this), isolate_(info->isolate()), - symbol_cache_(0, info->zone()), script_(info->script()), scanner_(isolate_->unicode_cache()), reusable_preparser_(NULL), @@ -815,7 +824,7 @@ FunctionLiteral* Parser::ParseProgram() { (*cached_data_)->Initialize(); } - source->TryFlatten(); + source = String::Flatten(source); FunctionLiteral* result; if (source->IsExternalTwoByteString()) { // Notice that the stream is destroyed at the end of the branch block. @@ -845,8 +854,10 @@ FunctionLiteral* Parser::ParseProgram() { PrintF(" - took %0.3f ms]\n", ms); } if (cached_data_mode_ == PRODUCE_CACHED_DATA) { - Vector<unsigned> store = recorder.ExtractData(); - *cached_data_ = new ScriptDataImpl(store); + if (result != NULL) { + Vector<unsigned> store = recorder.ExtractData(); + *cached_data_ = new ScriptData(store); + } log_ = NULL; } return result; @@ -928,7 +939,6 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info, FunctionLiteral::kNotGenerator, 0); result->set_ast_properties(factory()->visitor()->ast_properties()); - result->set_slot_processor(factory()->visitor()->slot_processor()); result->set_dont_optimize_reason( factory()->visitor()->dont_optimize_reason()); } else if (stack_overflow()) { @@ -954,7 +964,7 @@ FunctionLiteral* Parser::ParseLazy() { Handle<SharedFunctionInfo> shared_info = info()->shared_info(); // Initialize parser state. - source->TryFlatten(); + source = String::Flatten(source); FunctionLiteral* result; if (source->IsExternalTwoByteString()) { ExternalTwoByteStringUtf16CharacterStream stream( @@ -1082,7 +1092,8 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor, // Check "use strict" directive (ES5 14.1). if (strict_mode() == SLOPPY && - directive->Equals(isolate()->heap()->use_strict_string()) && + String::Equals(isolate()->factory()->use_strict_string(), + directive) && token_loc.end_pos - token_loc.beg_pos == isolate()->heap()->use_strict_string()->length() + 2) { // TODO(mstarzinger): Global strict eval calls, need their own scope @@ -1151,8 +1162,8 @@ Statement* Parser::ParseModuleElement(ZoneStringList* labels, ExpressionStatement* estmt = stmt->AsExpressionStatement(); if (estmt != NULL && estmt->expression()->AsVariableProxy() != NULL && - estmt->expression()->AsVariableProxy()->name()->Equals( - isolate()->heap()->module_string()) && + String::Equals(isolate()->factory()->module_string(), + estmt->expression()->AsVariableProxy()->name()) && !scanner()->literal_contains_escapes()) { return ParseModuleDeclaration(NULL, ok); } @@ -1707,18 +1718,14 @@ void Parser::Declare(Declaration* declaration, bool resolve, bool* ok) { // In harmony we treat re-declarations as early errors. See // ES5 16 for a definition of early errors. 
SmartArrayPointer<char> c_string = name->ToCString(DISALLOW_NULLS); - const char* elms[2] = { "Variable", c_string.get() }; - Vector<const char*> args(elms, 2); - ReportMessage("redeclaration", args); + const char* elms[1] = { c_string.get() }; + Vector<const char*> args(elms, 1); + ReportMessage("var_redeclaration", args); *ok = false; return; } - Handle<String> message_string = - isolate()->factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("Variable")); - Expression* expression = - NewThrowTypeError(isolate()->factory()->redeclaration_string(), - message_string, name); + Expression* expression = NewThrowTypeError( + "var_redeclaration", name, declaration->position()); declaration_scope->SetIllegalRedeclaration(expression); } } @@ -2327,9 +2334,9 @@ Statement* Parser::ParseExpressionOrLabelledStatement(ZoneStringList* labels, // make later anyway so we should go back and fix this then. if (ContainsLabel(labels, label) || TargetStackContainsLabel(label)) { SmartArrayPointer<char> c_string = label->ToCString(DISALLOW_NULLS); - const char* elms[2] = { "Label", c_string.get() }; - Vector<const char*> args(elms, 2); - ReportMessage("redeclaration", args); + const char* elms[1] = { c_string.get() }; + Vector<const char*> args(elms, 1); + ReportMessage("label_redeclaration", args); *ok = false; return NULL; } @@ -2353,8 +2360,8 @@ Statement* Parser::ParseExpressionOrLabelledStatement(ZoneStringList* labels, !scanner()->HasAnyLineTerminatorBeforeNext() && expr != NULL && expr->AsVariableProxy() != NULL && - expr->AsVariableProxy()->name()->Equals( - isolate()->heap()->native_string()) && + String::Equals(isolate()->factory()->native_string(), + expr->AsVariableProxy()->name()) && !scanner()->literal_contains_escapes()) { return ParseNativeDeclaration(ok); } @@ -2365,8 +2372,8 @@ Statement* Parser::ParseExpressionOrLabelledStatement(ZoneStringList* labels, peek() != Token::IDENTIFIER || scanner()->HasAnyLineTerminatorBeforeNext() || expr->AsVariableProxy() == NULL || - !expr->AsVariableProxy()->name()->Equals( - isolate()->heap()->module_string()) || + !String::Equals(isolate()->factory()->module_string(), + expr->AsVariableProxy()->name()) || scanner()->literal_contains_escapes()) { ExpectSemicolon(CHECK_OK); } @@ -2474,7 +2481,7 @@ Statement* Parser::ParseReturnStatement(bool* ok) { // reporting any errors on it, because of the way errors are // reported (underlining). Expect(Token::RETURN, CHECK_OK); - int pos = position(); + Scanner::Location loc = scanner()->location(); Token::Value tok = peek(); Statement* result; @@ -2492,24 +2499,17 @@ Statement* Parser::ParseReturnStatement(bool* ok) { Expression* generator = factory()->NewVariableProxy( function_state_->generator_object_variable()); Expression* yield = factory()->NewYield( - generator, return_value, Yield::FINAL, pos); - result = factory()->NewExpressionStatement(yield, pos); + generator, return_value, Yield::FINAL, loc.beg_pos); + result = factory()->NewExpressionStatement(yield, loc.beg_pos); } else { - result = factory()->NewReturnStatement(return_value, pos); + result = factory()->NewReturnStatement(return_value, loc.beg_pos); } - // An ECMAScript program is considered syntactically incorrect if it - // contains a return statement that is not within the body of a - // function. See ECMA-262, section 12.9, page 67. - // - // To be consistent with KJS we report the syntax error at runtime. 
- Scope* declaration_scope = scope_->DeclarationScope(); - if (declaration_scope->is_global_scope() || - declaration_scope->is_eval_scope()) { - Handle<String> message = isolate()->factory()->illegal_return_string(); - Expression* throw_error = - NewThrowSyntaxError(message, Handle<Object>::null()); - return factory()->NewExpressionStatement(throw_error, pos); + Scope* decl_scope = scope_->DeclarationScope(); + if (decl_scope->is_global_scope() || decl_scope->is_eval_scope()) { + ReportMessageAt(loc, "illegal_return"); + *ok = false; + return NULL; } return result; } @@ -2941,9 +2941,11 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) { // TODO(keuchel): Move the temporary variable to the block scope, after // implementing stack allocated block scoped variables. Factory* heap_factory = isolate()->factory(); - Handle<String> tempstr = - heap_factory->NewConsString(heap_factory->dot_for_string(), name); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate(), tempstr, 0); + Handle<String> tempstr; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate(), tempstr, + heap_factory->NewConsString(heap_factory->dot_for_string(), name), + 0); Handle<String> tempname = heap_factory->InternalizeString(tempstr); Variable* temp = scope_->DeclarationScope()->NewTemporary(tempname); VariableProxy* temp_proxy = factory()->NewVariableProxy(temp); @@ -2987,11 +2989,9 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) { bool accept_OF = expression->AsVariableProxy(); if (CheckInOrOf(accept_OF, &mode)) { - if (expression == NULL || !expression->IsValidLeftHandSide()) { - ReportMessageAt(lhs_location, "invalid_lhs_in_for", true); - *ok = false; - return NULL; - } + expression = this->CheckAndRewriteReferenceExpression( + expression, lhs_location, "invalid_lhs_in_for", CHECK_OK); + ForEachStatement* loop = factory()->NewForEachStatement(mode, labels, pos); Target target(&this->target_stack_, loop); @@ -3078,10 +3078,10 @@ DebuggerStatement* Parser::ParseDebuggerStatement(bool* ok) { } -void Parser::ReportInvalidPreparseData(Handle<String> name, bool* ok) { +void Parser::ReportInvalidCachedData(Handle<String> name, bool* ok) { SmartArrayPointer<char> name_string = name->ToCString(DISALLOW_NULLS); const char* element[1] = { name_string.get() }; - ReportMessage("invalid_preparser_data", + ReportMessage("invalid_cached_data_function", Vector<const char*>(element, 1)); *ok = false; } @@ -3200,10 +3200,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral( FunctionLiteral::IsParenthesizedFlag parenthesized = parenthesized_function_ ? FunctionLiteral::kIsParenthesized : FunctionLiteral::kNotParenthesized; - FunctionLiteral::IsGeneratorFlag generator = is_generator - ? FunctionLiteral::kIsGenerator - : FunctionLiteral::kNotGenerator; - DeferredFeedbackSlotProcessor* slot_processor; AstProperties ast_properties; BailoutReason dont_optimize_reason = kNoReason; // Parse function body. @@ -3331,140 +3327,14 @@ FunctionLiteral* Parser::ParseFunctionLiteral( parenthesized_function_ = false; // The bit was set for this function only. if (is_lazily_parsed) { - int function_block_pos = position(); - FunctionEntry entry; - if (cached_data_mode_ == CONSUME_CACHED_DATA) { - // If we have cached data, we use it to skip parsing the function body. - // The data contains the information we need to construct the lazy - // function. 
- entry = (*cached_data())->GetFunctionEntry(function_block_pos); - if (entry.is_valid()) { - if (entry.end_pos() <= function_block_pos) { - // End position greater than end of stream is safe, and hard - // to check. - ReportInvalidPreparseData(function_name, CHECK_OK); - } - scanner()->SeekForward(entry.end_pos() - 1); - - scope->set_end_position(entry.end_pos()); - Expect(Token::RBRACE, CHECK_OK); - isolate()->counters()->total_preparse_skipped()->Increment( - scope->end_position() - function_block_pos); - materialized_literal_count = entry.literal_count(); - expected_property_count = entry.property_count(); - scope_->SetStrictMode(entry.strict_mode()); - } else { - // This case happens when we have preparse data but it doesn't contain - // an entry for the function. As a safety net, fall back to eager - // parsing. It is unclear whether PreParser's laziness analysis can - // produce different results than the Parser's laziness analysis (see - // https://codereview.chromium.org/7565003 ). In this case, we must - // discard all the preparse data, since the symbol data will be wrong. - is_lazily_parsed = false; - cached_data_mode_ = NO_CACHED_DATA; - } - } else { - // With no cached data, we partially parse the function, without - // building an AST. This gathers the data needed to build a lazy - // function. - // FIXME(marja): Now the PreParser doesn't need to log functions / - // symbols; only errors -> clean that up. - SingletonLogger logger; - PreParser::PreParseResult result = LazyParseFunctionLiteral(&logger); - if (result == PreParser::kPreParseStackOverflow) { - // Propagate stack overflow. - set_stack_overflow(); - *ok = false; - return NULL; - } - if (logger.has_error()) { - const char* arg = logger.argument_opt(); - Vector<const char*> args; - if (arg != NULL) { - args = Vector<const char*>(&arg, 1); - } - ParserTraits::ReportMessageAt( - Scanner::Location(logger.start(), logger.end()), - logger.message(), - args); - *ok = false; - return NULL; - } - scope->set_end_position(logger.end()); - Expect(Token::RBRACE, CHECK_OK); - isolate()->counters()->total_preparse_skipped()->Increment( - scope->end_position() - function_block_pos); - materialized_literal_count = logger.literals(); - expected_property_count = logger.properties(); - scope_->SetStrictMode(logger.strict_mode()); - if (cached_data_mode_ == PRODUCE_CACHED_DATA) { - ASSERT(log_); - // Position right after terminal '}'. - int body_end = scanner()->location().end_pos; - log_->LogFunction(function_block_pos, body_end, - materialized_literal_count, - expected_property_count, - scope_->strict_mode()); - } - } - } - - if (!is_lazily_parsed) { - // Everything inside an eagerly parsed function will be parsed eagerly - // (see comment above). - ParsingModeScope parsing_mode(this, PARSE_EAGERLY); - body = new(zone()) ZoneList<Statement*>(8, zone()); - if (fvar != NULL) { - VariableProxy* fproxy = scope_->NewUnresolved( - factory(), function_name, Interface::NewConst()); - fproxy->BindTo(fvar); - body->Add(factory()->NewExpressionStatement( - factory()->NewAssignment(fvar_init_op, - fproxy, - factory()->NewThisFunction(pos), - RelocInfo::kNoPosition), - RelocInfo::kNoPosition), zone()); - } - - // For generators, allocate and yield an iterator on function entry. 
- if (is_generator) { - ZoneList<Expression*>* arguments = - new(zone()) ZoneList<Expression*>(0, zone()); - CallRuntime* allocation = factory()->NewCallRuntime( - isolate()->factory()->empty_string(), - Runtime::FunctionForId(Runtime::kHiddenCreateJSGeneratorObject), - arguments, pos); - VariableProxy* init_proxy = factory()->NewVariableProxy( - function_state_->generator_object_variable()); - Assignment* assignment = factory()->NewAssignment( - Token::INIT_VAR, init_proxy, allocation, RelocInfo::kNoPosition); - VariableProxy* get_proxy = factory()->NewVariableProxy( - function_state_->generator_object_variable()); - Yield* yield = factory()->NewYield( - get_proxy, assignment, Yield::INITIAL, RelocInfo::kNoPosition); - body->Add(factory()->NewExpressionStatement( - yield, RelocInfo::kNoPosition), zone()); - } - - ParseSourceElements(body, Token::RBRACE, false, false, CHECK_OK); - - if (is_generator) { - VariableProxy* get_proxy = factory()->NewVariableProxy( - function_state_->generator_object_variable()); - Expression *undefined = factory()->NewLiteral( - isolate()->factory()->undefined_value(), RelocInfo::kNoPosition); - Yield* yield = factory()->NewYield( - get_proxy, undefined, Yield::FINAL, RelocInfo::kNoPosition); - body->Add(factory()->NewExpressionStatement( - yield, RelocInfo::kNoPosition), zone()); - } - + SkipLazyFunctionBody(function_name, &materialized_literal_count, + &expected_property_count, CHECK_OK); + } else { + body = ParseEagerFunctionBody(function_name, pos, fvar, fvar_init_op, + is_generator, CHECK_OK); materialized_literal_count = function_state.materialized_literal_count(); expected_property_count = function_state.expected_property_count(); handler_count = function_state.handler_count(); - - Expect(Token::RBRACE, CHECK_OK); - scope->set_end_position(scanner()->location().end_pos); } // Validate strict mode. We can do this only after parsing the function, @@ -3500,7 +3370,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral( CHECK_OK); } ast_properties = *factory()->visitor()->ast_properties(); - slot_processor = factory()->visitor()->slot_processor(); dont_optimize_reason = factory()->visitor()->dont_optimize_reason(); } @@ -3508,6 +3377,9 @@ FunctionLiteral* Parser::ParseFunctionLiteral( CheckConflictingVarDeclarations(scope, CHECK_OK); } + FunctionLiteral::IsGeneratorFlag generator = is_generator + ? FunctionLiteral::kIsGenerator + : FunctionLiteral::kNotGenerator; FunctionLiteral* function_literal = factory()->NewFunctionLiteral(function_name, scope, @@ -3524,7 +3396,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral( pos); function_literal->set_function_token_position(function_token_pos); function_literal->set_ast_properties(&ast_properties); - function_literal->set_slot_processor(slot_processor); function_literal->set_dont_optimize_reason(dont_optimize_reason); if (fni_ != NULL && should_infer_name) fni_->AddFunction(function_literal); @@ -3532,7 +3403,149 @@ FunctionLiteral* Parser::ParseFunctionLiteral( } -PreParser::PreParseResult Parser::LazyParseFunctionLiteral( +void Parser::SkipLazyFunctionBody(Handle<String> function_name, + int* materialized_literal_count, + int* expected_property_count, + bool* ok) { + int function_block_pos = position(); + if (cached_data_mode_ == CONSUME_CACHED_DATA) { + // If we have cached data, we use it to skip parsing the function body. The + // data contains the information we need to construct the lazy function. 
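SkipLazyFunctionBody, whose body begins above and continues below, either seeks straight past the function using a cached FunctionEntry or falls back to the preparser. A rough standalone model of the cached-entry fast path (types here are illustrative, not the parser's):

    struct CachedEntry {
      bool valid;
      int end_pos;          // position just past the function's closing brace
      int literal_count;    // facts recorded by an earlier (pre)parse
      int property_count;
    };

    // Returns true when the body starting at 'body_start' can be skipped.
    bool TrySkipBody(const CachedEntry& entry, int body_start,
                     int* scanner_pos, int* literals, int* properties) {
      if (!entry.valid || entry.end_pos <= body_start) return false;  // data unusable
      *scanner_pos = entry.end_pos;     // analogue of scanner()->SeekForward(...)
      *literals = entry.literal_count;
      *properties = entry.property_count;
      return true;
    }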
+ FunctionEntry entry = + (*cached_data())->GetFunctionEntry(function_block_pos); + if (entry.is_valid()) { + if (entry.end_pos() <= function_block_pos) { + // End position greater than end of stream is safe, and hard to check. + ReportInvalidCachedData(function_name, ok); + if (!*ok) { + return; + } + } + scanner()->SeekForward(entry.end_pos() - 1); + + scope_->set_end_position(entry.end_pos()); + Expect(Token::RBRACE, ok); + if (!*ok) { + return; + } + isolate()->counters()->total_preparse_skipped()->Increment( + scope_->end_position() - function_block_pos); + *materialized_literal_count = entry.literal_count(); + *expected_property_count = entry.property_count(); + scope_->SetStrictMode(entry.strict_mode()); + } else { + // This case happens when we have preparse data but it doesn't contain an + // entry for the function. Fail the compilation. + ReportInvalidCachedData(function_name, ok); + return; + } + } else { + // With no cached data, we partially parse the function, without building an + // AST. This gathers the data needed to build a lazy function. + SingletonLogger logger; + PreParser::PreParseResult result = + ParseLazyFunctionBodyWithPreParser(&logger); + if (result == PreParser::kPreParseStackOverflow) { + // Propagate stack overflow. + set_stack_overflow(); + *ok = false; + return; + } + if (logger.has_error()) { + const char* arg = logger.argument_opt(); + Vector<const char*> args; + if (arg != NULL) { + args = Vector<const char*>(&arg, 1); + } + ParserTraits::ReportMessageAt( + Scanner::Location(logger.start(), logger.end()), + logger.message(), args, logger.is_reference_error()); + *ok = false; + return; + } + scope_->set_end_position(logger.end()); + Expect(Token::RBRACE, ok); + if (!*ok) { + return; + } + isolate()->counters()->total_preparse_skipped()->Increment( + scope_->end_position() - function_block_pos); + *materialized_literal_count = logger.literals(); + *expected_property_count = logger.properties(); + scope_->SetStrictMode(logger.strict_mode()); + if (cached_data_mode_ == PRODUCE_CACHED_DATA) { + ASSERT(log_); + // Position right after terminal '}'. + int body_end = scanner()->location().end_pos; + log_->LogFunction(function_block_pos, body_end, + *materialized_literal_count, + *expected_property_count, + scope_->strict_mode()); + } + } +} + + +ZoneList<Statement*>* Parser::ParseEagerFunctionBody( + Handle<String> function_name, int pos, Variable* fvar, + Token::Value fvar_init_op, bool is_generator, bool* ok) { + // Everything inside an eagerly parsed function will be parsed eagerly + // (see comment above). + ParsingModeScope parsing_mode(this, PARSE_EAGERLY); + ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(8, zone()); + if (fvar != NULL) { + VariableProxy* fproxy = scope_->NewUnresolved( + factory(), function_name, Interface::NewConst()); + fproxy->BindTo(fvar); + body->Add(factory()->NewExpressionStatement( + factory()->NewAssignment(fvar_init_op, + fproxy, + factory()->NewThisFunction(pos), + RelocInfo::kNoPosition), + RelocInfo::kNoPosition), zone()); + } + + // For generators, allocate and yield an iterator on function entry. 
+ if (is_generator) { + ZoneList<Expression*>* arguments = + new(zone()) ZoneList<Expression*>(0, zone()); + CallRuntime* allocation = factory()->NewCallRuntime( + isolate()->factory()->empty_string(), + Runtime::FunctionForId(Runtime::kHiddenCreateJSGeneratorObject), + arguments, pos); + VariableProxy* init_proxy = factory()->NewVariableProxy( + function_state_->generator_object_variable()); + Assignment* assignment = factory()->NewAssignment( + Token::INIT_VAR, init_proxy, allocation, RelocInfo::kNoPosition); + VariableProxy* get_proxy = factory()->NewVariableProxy( + function_state_->generator_object_variable()); + Yield* yield = factory()->NewYield( + get_proxy, assignment, Yield::INITIAL, RelocInfo::kNoPosition); + body->Add(factory()->NewExpressionStatement( + yield, RelocInfo::kNoPosition), zone()); + } + + ParseSourceElements(body, Token::RBRACE, false, false, CHECK_OK); + + if (is_generator) { + VariableProxy* get_proxy = factory()->NewVariableProxy( + function_state_->generator_object_variable()); + Expression *undefined = factory()->NewLiteral( + isolate()->factory()->undefined_value(), RelocInfo::kNoPosition); + Yield* yield = factory()->NewYield( + get_proxy, undefined, Yield::FINAL, RelocInfo::kNoPosition); + body->Add(factory()->NewExpressionStatement( + yield, RelocInfo::kNoPosition), zone()); + } + + Expect(Token::RBRACE, CHECK_OK); + scope_->set_end_position(scanner()->location().end_pos); + + return body; +} + + +PreParser::PreParseResult Parser::ParseLazyFunctionBodyWithPreParser( SingletonLogger* logger) { HistogramTimerScope preparse_scope(isolate()->counters()->pre_parse()); ASSERT_EQ(Token::LBRACE, scanner()->current_token()); @@ -3626,13 +3639,13 @@ void Parser::CheckConflictingVarDeclarations(Scope* scope, bool* ok) { // errors. See ES5 16 for a definition of early errors. Handle<String> name = decl->proxy()->name(); SmartArrayPointer<char> c_string = name->ToCString(DISALLOW_NULLS); - const char* elms[2] = { "Variable", c_string.get() }; - Vector<const char*> args(elms, 2); + const char* elms[1] = { c_string.get() }; + Vector<const char*> args(elms, 1); int position = decl->proxy()->position(); Scanner::Location location = position == RelocInfo::kNoPosition ? Scanner::Location::invalid() : Scanner::Location(position, position + 1); - ParserTraits::ReportMessageAt(location, "redeclaration", args); + ParserTraits::ReportMessageAt(location, "var_redeclaration", args); *ok = false; } } @@ -3695,58 +3708,6 @@ void Parser::RegisterTargetUse(Label* target, Target* stop) { } -Expression* Parser::NewThrowReferenceError(Handle<String> message) { - return NewThrowError(isolate()->factory()->MakeReferenceError_string(), - message, HandleVector<Object>(NULL, 0)); -} - - -Expression* Parser::NewThrowSyntaxError(Handle<String> message, - Handle<Object> first) { - int argc = first.is_null() ? 
0 : 1; - Vector< Handle<Object> > arguments = HandleVector<Object>(&first, argc); - return NewThrowError( - isolate()->factory()->MakeSyntaxError_string(), message, arguments); -} - - -Expression* Parser::NewThrowTypeError(Handle<String> message, - Handle<Object> first, - Handle<Object> second) { - ASSERT(!first.is_null() && !second.is_null()); - Handle<Object> elements[] = { first, second }; - Vector< Handle<Object> > arguments = - HandleVector<Object>(elements, ARRAY_SIZE(elements)); - return NewThrowError( - isolate()->factory()->MakeTypeError_string(), message, arguments); -} - - -Expression* Parser::NewThrowError(Handle<String> constructor, - Handle<String> message, - Vector< Handle<Object> > arguments) { - int argc = arguments.length(); - Handle<FixedArray> elements = isolate()->factory()->NewFixedArray(argc, - TENURED); - for (int i = 0; i < argc; i++) { - Handle<Object> element = arguments[i]; - if (!element.is_null()) { - elements->set(i, *element); - } - } - Handle<JSArray> array = isolate()->factory()->NewJSArrayWithElements( - elements, FAST_ELEMENTS, TENURED); - - int pos = position(); - ZoneList<Expression*>* args = new(zone()) ZoneList<Expression*>(2, zone()); - args->Add(factory()->NewLiteral(message, pos), zone()); - args->Add(factory()->NewLiteral(array, pos), zone()); - CallRuntime* call_constructor = - factory()->NewCallRuntime(constructor, NULL, args, pos); - return factory()->NewThrow(call_constructor, pos); -} - - // ---------------------------------------------------------------------------- // Regular expressions @@ -3820,8 +3781,7 @@ bool RegExpParser::simple() { RegExpTree* RegExpParser::ReportError(Vector<const char> message) { failed_ = true; - *error_ = isolate()->factory()->NewStringFromAscii(message, NOT_TENURED); - ASSERT(!error_->is_null()); + *error_ = isolate()->factory()->NewStringFromAscii(message).ToHandleChecked(); // Zip to the end to make sure the no more input is read. current_ = kEndMarker; next_pos_ = in()->length(); @@ -4556,27 +4516,27 @@ RegExpTree* RegExpParser::ParseCharacterClass() { // ---------------------------------------------------------------------------- // The Parser interface. -ScriptDataImpl::~ScriptDataImpl() { +ScriptData::~ScriptData() { if (owns_store_) store_.Dispose(); } -int ScriptDataImpl::Length() { +int ScriptData::Length() { return store_.length() * sizeof(unsigned); } -const char* ScriptDataImpl::Data() { +const char* ScriptData::Data() { return reinterpret_cast<const char*>(store_.start()); } -bool ScriptDataImpl::HasError() { +bool ScriptData::HasError() { return has_error(); } -void ScriptDataImpl::Initialize() { +void ScriptData::Initialize() { // Prepares state for use. if (store_.length() >= PreparseDataConstants::kHeaderSize) { function_index_ = PreparseDataConstants::kHeaderSize; @@ -4593,7 +4553,7 @@ void ScriptDataImpl::Initialize() { } -int ScriptDataImpl::ReadNumber(byte** source) { +int ScriptData::ReadNumber(byte** source) { // Reads a number from symbol_data_ in base 128. The most significant // bit marks that there are more digits. // If the first byte is 0x80 (kNumberTerminator), it would normally @@ -4620,33 +4580,6 @@ int ScriptDataImpl::ReadNumber(byte** source) { } -// Create a Scanner for the preparser to use as input, and preparse the source. 
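ReadNumber above decodes symbol data stored in base 128, where the most significant bit of a byte marks that more digits follow. A standalone sketch of that scheme; the digit order and the 0x80 terminator handling here are simplified assumptions, not the exact V8 routine:

    #include <cstdint>

    int ReadBase128(const uint8_t** source) {
      const uint8_t* p = *source;
      int value = 0;
      while (true) {
        uint8_t byte = *p++;
        value = (value << 7) | (byte & 0x7f);   // accumulate 7 payload bits
        if ((byte & 0x80) == 0) break;          // high bit clear: last digit
      }
      *source = p;
      return value;
    }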
-ScriptDataImpl* PreParserApi::PreParse(Isolate* isolate, - Utf16CharacterStream* source) { - CompleteParserRecorder recorder; - HistogramTimerScope timer(isolate->counters()->pre_parse()); - Scanner scanner(isolate->unicode_cache()); - intptr_t stack_limit = isolate->stack_guard()->real_climit(); - PreParser preparser(&scanner, &recorder, stack_limit); - preparser.set_allow_lazy(true); - preparser.set_allow_generators(FLAG_harmony_generators); - preparser.set_allow_for_of(FLAG_harmony_iteration); - preparser.set_allow_harmony_scoping(FLAG_harmony_scoping); - preparser.set_allow_harmony_numeric_literals(FLAG_harmony_numeric_literals); - scanner.Initialize(source); - PreParser::PreParseResult result = preparser.PreParseProgram(); - if (result == PreParser::kPreParseStackOverflow) { - isolate->StackOverflow(); - return NULL; - } - - // Extract the accumulated data from the recorder as a single - // contiguous vector that we are responsible for disposing. - Vector<unsigned> store = recorder.ExtractData(); - return new ScriptDataImpl(store); -} - - bool RegExpParser::ParseRegExp(FlatStringReader* input, bool multiline, RegExpCompileData* result, @@ -4684,11 +4617,12 @@ bool Parser::Parse() { SetCachedData(info()->cached_data(), info()->cached_data_mode()); if (info()->cached_data_mode() == CONSUME_CACHED_DATA && (*info()->cached_data())->has_error()) { - ScriptDataImpl* cached_data = *(info()->cached_data()); + ScriptData* cached_data = *(info()->cached_data()); Scanner::Location loc = cached_data->MessageLocation(); const char* message = cached_data->BuildMessage(); Vector<const char*> args = cached_data->BuildArgs(); - ParserTraits::ReportMessageAt(loc, message, args); + ParserTraits::ReportMessageAt(loc, message, args, + cached_data->IsReferenceError()); DeleteArray(message); for (int i = 0; i < args.length(); i++) { DeleteArray(args[i]); diff --git a/deps/v8/src/parser.h b/deps/v8/src/parser.h index f49626766..71bbfd195 100644 --- a/deps/v8/src/parser.h +++ b/deps/v8/src/parser.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PARSER_H_ #define V8_PARSER_H_ @@ -82,17 +59,22 @@ class FunctionEntry BASE_EMBEDDED { }; -class ScriptDataImpl : public ScriptData { +class ScriptData { public: - explicit ScriptDataImpl(Vector<unsigned> store) + explicit ScriptData(Vector<unsigned> store) : store_(store), owns_store_(true) { } - // Create an empty ScriptDataImpl that is guaranteed to not satisfy - // a SanityCheck. - ScriptDataImpl() : owns_store_(false) { } + ScriptData(Vector<unsigned> store, bool owns_store) + : store_(store), + owns_store_(owns_store) { } + + // The created ScriptData won't take ownership of the data. If the alignment + // is not correct, this will copy the data (and the created ScriptData will + // take ownership of the copy). + static ScriptData* New(const char* data, int length); - virtual ~ScriptDataImpl(); + virtual ~ScriptData(); virtual int Length(); virtual const char* Data(); virtual bool HasError(); @@ -104,14 +86,17 @@ class ScriptDataImpl : public ScriptData { int GetSymbolIdentifier(); bool SanityCheck(); - Scanner::Location MessageLocation(); - const char* BuildMessage(); - Vector<const char*> BuildArgs(); - - int symbol_count() { - return (store_.length() > PreparseDataConstants::kHeaderSize) - ? store_[PreparseDataConstants::kSymbolCountOffset] - : 0; + Scanner::Location MessageLocation() const; + bool IsReferenceError() const; + const char* BuildMessage() const; + Vector<const char*> BuildArgs() const; + + int function_count() { + int functions_size = + static_cast<int>(store_[PreparseDataConstants::kFunctionsSizeOffset]); + if (functions_size < 0) return 0; + if (functions_size % FunctionEntry::kSize != 0) return 0; + return functions_size / FunctionEntry::kSize; } // The following functions should only be called if SanityCheck has // returned true. @@ -120,6 +105,10 @@ class ScriptDataImpl : public ScriptData { unsigned version() { return store_[PreparseDataConstants::kVersionOffset]; } private: + // Disable copying and assigning; because of owns_store they won't be correct. + ScriptData(const ScriptData&); + ScriptData& operator=(const ScriptData&); + friend class v8::ScriptCompiler; Vector<unsigned> store_; unsigned char* symbol_data_; @@ -127,35 +116,13 @@ class ScriptDataImpl : public ScriptData { int function_index_; bool owns_store_; - unsigned Read(int position); - unsigned* ReadAddress(int position); + unsigned Read(int position) const; + unsigned* ReadAddress(int position) const; // Reads a number from the current symbols int ReadNumber(byte** source); - ScriptDataImpl(const char* backing_store, int length) - : store_(reinterpret_cast<unsigned*>(const_cast<char*>(backing_store)), - length / static_cast<int>(sizeof(unsigned))), - owns_store_(false) { - ASSERT_EQ(0, static_cast<int>( - reinterpret_cast<intptr_t>(backing_store) % sizeof(unsigned))); - } - // Read strings written by ParserRecorder::WriteString. 
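ScriptData::New above promises to alias the caller's buffer when it is suitably aligned and to copy it otherwise, and the removed constructor asserted exactly that alignment. A minimal sketch of the underlying check-then-copy step, with illustrative names that are not V8's:

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

// Returns a view of 'data' that is safe to read as 'unsigned' values:
// the buffer itself when already aligned, otherwise a malloc'ed copy.
// '*owns_copy' tells the caller whether free() is needed later.
static const unsigned* AlignedView(const char* data, int length,
                                   bool* owns_copy) {
  *owns_copy = false;
  if (reinterpret_cast<uintptr_t>(data) % sizeof(unsigned) == 0) {
    return reinterpret_cast<const unsigned*>(data);
  }
  void* copy = malloc(length);  // malloc returns suitably aligned memory.
  if (copy == NULL) return NULL;
  memcpy(copy, data, length);
  *owns_copy = true;
  return reinterpret_cast<const unsigned*>(copy);
}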
static const char* ReadString(unsigned* start, int* chars); - - friend class ScriptData; -}; - - -class PreParserApi { - public: - // Pre-parse a character stream and return full preparse data. - // - // This interface is here instead of in preparser.h because it instantiates a - // preparser recorder object that is suited to the parser's purposes. Also, - // the preparser doesn't know about ScriptDataImpl. - static ScriptDataImpl* PreParse(Isolate* isolate, - Utf16CharacterStream* source); }; @@ -430,6 +397,7 @@ class ParserTraits { typedef ObjectLiteral::Property* ObjectLiteralProperty; typedef ZoneList<v8::internal::Expression*>* ExpressionList; typedef ZoneList<ObjectLiteral::Property*>* PropertyList; + typedef ZoneList<v8::internal::Statement*>* StatementList; // For constructing objects returned by the traversing functions. typedef AstNodeFactory<AstConstructionVisitor> Factory; @@ -441,16 +409,14 @@ class ParserTraits { template<typename FunctionState> static void SetUpFunctionState(FunctionState* function_state, Zone* zone) { Isolate* isolate = zone->isolate(); - function_state->isolate_ = isolate; function_state->saved_ast_node_id_ = isolate->ast_node_id(); isolate->set_ast_node_id(BailoutId::FirstUsable().ToInt()); } template<typename FunctionState> - static void TearDownFunctionState(FunctionState* function_state) { + static void TearDownFunctionState(FunctionState* function_state, Zone* zone) { if (function_state->outer_function_state_ != NULL) { - function_state->isolate_->set_ast_node_id( - function_state->saved_ast_node_id_); + zone->isolate()->set_ast_node_id(function_state->saved_ast_node_id_); } } @@ -462,6 +428,11 @@ class ParserTraits { static bool IsIdentifier(Expression* expression); + static Handle<String> AsIdentifier(Expression* expression) { + ASSERT(IsIdentifier(expression)); + return expression->AsVariableProxy()->name(); + } + static bool IsBoilerplateProperty(ObjectLiteral::Property* property) { return ObjectLiteral::IsBoilerplateProperty(property); } @@ -501,10 +472,6 @@ class ParserTraits { // used on for the statically checking assignments to harmony const bindings. static Expression* MarkExpressionAsLValue(Expression* expression); - // Checks LHS expression for assignment and prefix/postfix increment/decrement - // in strict mode. - void CheckStrictModeLValue(Expression* expression, bool* ok); - // Returns true if we have a binary expression between two numeric // literals. In that case, *x will be changed to an expression which is the // computed value. @@ -527,6 +494,24 @@ class ParserTraits { Expression* expression, Token::Value op, int pos, AstNodeFactory<AstConstructionVisitor>* factory); + // Generate AST node that throws a ReferenceError with the given type. + Expression* NewThrowReferenceError(const char* type, int pos); + + // Generate AST node that throws a SyntaxError with the given + // type. The first argument may be null (in the handle sense) in + // which case no arguments are passed to the constructor. + Expression* NewThrowSyntaxError( + const char* type, Handle<Object> arg, int pos); + + // Generate AST node that throws a TypeError with the given + // type. Both arguments must be non-null (in the handle sense). + Expression* NewThrowTypeError(const char* type, Handle<Object> arg, int pos); + + // Generic AST generator for throwing errors from compiled code. + Expression* NewThrowError( + Handle<String> constructor, const char* type, + Vector<Handle<Object> > arguments, int pos); + // Reporting errors. 
void ReportMessageAt(Scanner::Location source_location, const char* message, @@ -580,6 +565,9 @@ class ParserTraits { ZoneList<ObjectLiteral::Property*>* NewPropertyList(int size, Zone* zone) { return new(zone) ZoneList<ObjectLiteral::Property*>(size, zone); } + ZoneList<v8::internal::Statement*>* NewStatementList(int size, Zone* zone) { + return new(zone) ZoneList<v8::internal::Statement*>(size, zone); + } // Temporary glue; these functions will move to ParserBase. Expression* ParseV8Intrinsic(bool* ok); @@ -655,9 +643,9 @@ class Parser : public ParserBase<ParserTraits> { Handle<String> source); // Report syntax error - void ReportInvalidPreparseData(Handle<String> name, bool* ok); + void ReportInvalidCachedData(Handle<String> name, bool* ok); - void SetCachedData(ScriptDataImpl** data, + void SetCachedData(ScriptData** data, CachedDataMode cached_data_mode) { cached_data_mode_ = cached_data_mode; if (cached_data_mode == NO_CACHED_DATA) { @@ -665,12 +653,11 @@ class Parser : public ParserBase<ParserTraits> { } else { ASSERT(data != NULL); cached_data_ = data; - symbol_cache_.Initialize(*data ? (*data)->symbol_count() : 0, zone()); } } bool inside_with() const { return scope_->inside_with(); } - ScriptDataImpl** cached_data() const { return cached_data_; } + ScriptData** cached_data() const { return cached_data_; } CachedDataMode cached_data_mode() const { return cached_data_mode_; } Scope* DeclarationScope(VariableMode mode) { return IsLexicalVariableMode(mode) @@ -776,39 +763,32 @@ class Parser : public ParserBase<ParserTraits> { Scope* NewScope(Scope* parent, ScopeType type); - Handle<String> LookupCachedSymbol(int symbol_id); + // Skip over a lazy function, either using cached data if we have it, or + // by parsing the function with PreParser. Consumes the ending }. + void SkipLazyFunctionBody(Handle<String> function_name, + int* materialized_literal_count, + int* expected_property_count, + bool* ok); - // Generate AST node that throw a ReferenceError with the given type. - Expression* NewThrowReferenceError(Handle<String> type); - - // Generate AST node that throw a SyntaxError with the given - // type. The first argument may be null (in the handle sense) in - // which case no arguments are passed to the constructor. - Expression* NewThrowSyntaxError(Handle<String> type, Handle<Object> first); - - // Generate AST node that throw a TypeError with the given - // type. Both arguments must be non-null (in the handle sense). - Expression* NewThrowTypeError(Handle<String> type, - Handle<Object> first, - Handle<Object> second); - - // Generic AST generator for throwing errors from compiled code. - Expression* NewThrowError(Handle<String> constructor, - Handle<String> type, - Vector< Handle<Object> > arguments); + PreParser::PreParseResult ParseLazyFunctionBodyWithPreParser( + SingletonLogger* logger); - PreParser::PreParseResult LazyParseFunctionLiteral( - SingletonLogger* logger); + // Consumes the ending }. 
+ ZoneList<Statement*>* ParseEagerFunctionBody(Handle<String> function_name, + int pos, + Variable* fvar, + Token::Value fvar_init_op, + bool is_generator, + bool* ok); Isolate* isolate_; - ZoneList<Handle<String> > symbol_cache_; Handle<Script> script_; Scanner scanner_; PreParser* reusable_preparser_; Scope* original_scope_; // for ES5 function declarations in sloppy eval Target* target_stack_; // for break, continue statements - ScriptDataImpl** cached_data_; + ScriptData** cached_data_; CachedDataMode cached_data_mode_; CompilationInfo* info_; diff --git a/deps/v8/src/platform-cygwin.cc b/deps/v8/src/platform-cygwin.cc index 4ae9bec9e..05ce57827 100644 --- a/deps/v8/src/platform-cygwin.cc +++ b/deps/v8/src/platform-cygwin.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Platform-specific code for Cygwin goes here. For the POSIX-compatible // parts, the implementation is in platform-posix.cc. @@ -42,9 +19,7 @@ #include "v8.h" #include "platform.h" -#include "simulator.h" #include "v8threads.h" -#include "vm-state-inl.h" #include "win32-headers.h" namespace v8 { diff --git a/deps/v8/src/platform-freebsd.cc b/deps/v8/src/platform-freebsd.cc index 7d15cef6b..7e5bb8a9f 100644 --- a/deps/v8/src/platform-freebsd.cc +++ b/deps/v8/src/platform-freebsd.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Platform-specific code for FreeBSD goes here. For the POSIX-compatible // parts, the implementation is in platform-posix.cc. @@ -54,7 +31,6 @@ #include "v8threads.h" #include "platform.h" -#include "vm-state-inl.h" namespace v8 { diff --git a/deps/v8/src/platform-linux.cc b/deps/v8/src/platform-linux.cc index 527b9f616..34e8c551e 100644 --- a/deps/v8/src/platform-linux.cc +++ b/deps/v8/src/platform-linux.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Platform-specific code for Linux goes here. For the POSIX-compatible // parts, the implementation is in platform-posix.cc. 
@@ -68,7 +45,6 @@ #include "platform.h" #include "v8threads.h" -#include "vm-state-inl.h" namespace v8 { diff --git a/deps/v8/src/platform-macos.cc b/deps/v8/src/platform-macos.cc index 25ba0da08..facb6bd61 100644 --- a/deps/v8/src/platform-macos.cc +++ b/deps/v8/src/platform-macos.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Platform-specific code for MacOS goes here. For the POSIX-compatible // parts, the implementation is in platform-posix.cc. @@ -59,8 +36,6 @@ #include "v8.h" #include "platform.h" -#include "simulator.h" -#include "vm-state-inl.h" namespace v8 { diff --git a/deps/v8/src/platform-openbsd.cc b/deps/v8/src/platform-openbsd.cc index a5d477d61..21fe2b4d9 100644 --- a/deps/v8/src/platform-openbsd.cc +++ b/deps/v8/src/platform-openbsd.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Platform-specific code for OpenBSD and NetBSD goes here. For the // POSIX-compatible parts, the implementation is in platform-posix.cc. @@ -52,7 +29,6 @@ #include "platform.h" #include "v8threads.h" -#include "vm-state-inl.h" namespace v8 { diff --git a/deps/v8/src/platform-posix.cc b/deps/v8/src/platform-posix.cc index 5ca12522c..143bf3ca1 100644 --- a/deps/v8/src/platform-posix.cc +++ b/deps/v8/src/platform-posix.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Platform-specific code for POSIX goes here. This is not a platform on its // own, but contains the parts which are the same across the POSIX platforms @@ -66,7 +43,6 @@ #include "v8.h" -#include "codegen.h" #include "isolate-inl.h" #include "platform.h" @@ -96,6 +72,12 @@ intptr_t OS::MaxVirtualMemory() { struct rlimit limit; int result = getrlimit(RLIMIT_DATA, &limit); if (result != 0) return 0; +#if V8_OS_NACL + // The NaCl compiler doesn't like resource.h constants. 
+ if (static_cast<int>(limit.rlim_cur) == -1) return 0; +#else + if (limit.rlim_cur == RLIM_INFINITY) return 0; +#endif return limit.rlim_cur; } @@ -214,6 +196,11 @@ void* OS::GetRandomMmapAddr() { // See http://code.google.com/p/nativeclient/issues/3341 return NULL; #endif +#if defined(ADDRESS_SANITIZER) || defined(MEMORY_SANITIZER) || \ + defined(THREAD_SANITIZER) + // Dynamic tools do not support custom mmap addresses. + return NULL; +#endif Isolate* isolate = Isolate::UncheckedCurrent(); // Note that the current isolate isn't set up in a call path via // CpuFeatures::Probe. We don't care about randomization in this case because @@ -297,33 +284,6 @@ void OS::DebugBreak() { // ---------------------------------------------------------------------------- // Math functions -double modulo(double x, double y) { - return std::fmod(x, y); -} - - -#define UNARY_MATH_FUNCTION(name, generator) \ -static UnaryMathFunction fast_##name##_function = NULL; \ -void init_fast_##name##_function() { \ - fast_##name##_function = generator; \ -} \ -double fast_##name(double x) { \ - return (*fast_##name##_function)(x); \ -} - -UNARY_MATH_FUNCTION(exp, CreateExpFunction()) -UNARY_MATH_FUNCTION(sqrt, CreateSqrtFunction()) - -#undef UNARY_MATH_FUNCTION - - -void lazily_initialize_fast_exp() { - if (fast_exp_function == NULL) { - init_fast_exp_function(); - } -} - - double OS::nan_value() { // NAN from math.h is defined in C99 and not in POSIX. return NAN; @@ -553,8 +513,6 @@ void OS::PostSetUp() { OS::memcopy_uint8_function = CreateMemCopyUint8Function(&OS::MemCopyUint8Wrapper); #endif - // fast_exp is initialized lazily. - init_fast_sqrt_function(); } diff --git a/deps/v8/src/platform-qnx.cc b/deps/v8/src/platform-qnx.cc index ef0998f89..587a1d355 100644 --- a/deps/v8/src/platform-qnx.cc +++ b/deps/v8/src/platform-qnx.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
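The OS::MaxVirtualMemory change above maps both a failed getrlimit() call and an unlimited RLIMIT_DATA to 0, with a NaCl special case because that toolchain rejects the resource.h constants. On an ordinary POSIX system the same query, reduced to a standalone sketch (the function name is illustrative), looks like:

#include <sys/resource.h>
#include <stdint.h>

// Returns the soft data-segment limit in bytes, or 0 when the query fails
// or the limit is unbounded, matching the convention used above.
static intptr_t DataSegmentLimit() {
  struct rlimit limit;
  if (getrlimit(RLIMIT_DATA, &limit) != 0) return 0;
  if (limit.rlim_cur == RLIM_INFINITY) return 0;
  return static_cast<intptr_t>(limit.rlim_cur);
}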
// Platform-specific code for QNX goes here. For the POSIX-compatible // parts the implementation is in platform-posix.cc. @@ -56,7 +33,6 @@ #include "platform.h" #include "v8threads.h" -#include "vm-state-inl.h" namespace v8 { diff --git a/deps/v8/src/platform-solaris.cc b/deps/v8/src/platform-solaris.cc index f23ae0838..a2226f613 100644 --- a/deps/v8/src/platform-solaris.cc +++ b/deps/v8/src/platform-solaris.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Platform-specific code for Solaris 10 goes here. For the POSIX-compatible // parts, the implementation is in platform-posix.cc. @@ -53,7 +30,6 @@ #include "platform.h" #include "v8threads.h" -#include "vm-state-inl.h" // It seems there is a bug in some Solaris distributions (experienced in diff --git a/deps/v8/src/platform-win32.cc b/deps/v8/src/platform-win32.cc index fe84bcd3f..08d03e149 100644 --- a/deps/v8/src/platform-win32.cc +++ b/deps/v8/src/platform-win32.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Platform-specific code for Win32. @@ -42,11 +19,8 @@ #include "v8.h" -#include "codegen.h" #include "isolate-inl.h" #include "platform.h" -#include "simulator.h" -#include "vm-state-inl.h" #ifdef _MSC_VER @@ -155,68 +129,6 @@ void OS::MemMove(void* dest, const void* src, size_t size) { #endif // V8_TARGET_ARCH_IA32 -#ifdef _WIN64 -typedef double (*ModuloFunction)(double, double); -static ModuloFunction modulo_function = NULL; -// Defined in codegen-x64.cc. -ModuloFunction CreateModuloFunction(); - -void init_modulo_function() { - modulo_function = CreateModuloFunction(); -} - - -double modulo(double x, double y) { - // Note: here we rely on dependent reads being ordered. This is true - // on all architectures we currently support. - return (*modulo_function)(x, y); -} -#else // Win32 - -double modulo(double x, double y) { - // Workaround MS fmod bugs. ECMA-262 says: - // dividend is finite and divisor is an infinity => result equals dividend - // dividend is a zero and divisor is nonzero finite => result equals dividend - if (!(std::isfinite(x) && (!std::isfinite(y) && !std::isnan(y))) && - !(x == 0 && (y != 0 && std::isfinite(y)))) { - x = fmod(x, y); - } - return x; -} - -#endif // _WIN64 - - -#define UNARY_MATH_FUNCTION(name, generator) \ -static UnaryMathFunction fast_##name##_function = NULL; \ -void init_fast_##name##_function() { \ - fast_##name##_function = generator; \ -} \ -double fast_##name(double x) { \ - return (*fast_##name##_function)(x); \ -} - -UNARY_MATH_FUNCTION(exp, CreateExpFunction()) -UNARY_MATH_FUNCTION(sqrt, CreateSqrtFunction()) - -#undef UNARY_MATH_FUNCTION - - -void lazily_initialize_fast_exp() { - if (fast_exp_function == NULL) { - init_fast_exp_function(); - } -} - - -void MathSetup() { -#ifdef _WIN64 - init_modulo_function(); -#endif - // fast_exp is initialized lazily. - init_fast_sqrt_function(); -} - class TimezoneCache { public: @@ -541,9 +453,6 @@ char* Win32Time::LocalTimezone(TimezoneCache* cache) { void OS::PostSetUp() { - // Math functions depend on CPU features therefore they are initialized after - // CPU. - MathSetup(); #if V8_TARGET_ARCH_IA32 OS::MemMoveFunction generated_memmove = CreateMemMoveFunction(); if (generated_memmove != NULL) { diff --git a/deps/v8/src/platform.h b/deps/v8/src/platform.h index d087d2397..764bd5408 100644 --- a/deps/v8/src/platform.h +++ b/deps/v8/src/platform.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
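The modulo() implementation removed from platform-win32.cc above guards the two ECMA-262 edge cases by hand before calling fmod(), because old MSVC runtimes got them wrong; a conforming C99/IEEE-754 fmod already behaves this way, which is why the POSIX version simply forwards to std::fmod. Restated as a compact sketch rather than a drop-in replacement:

#include <cmath>

// ECMA-262 modulo semantics:
//   finite dividend, infinite divisor      -> result equals the dividend
//   zero dividend, nonzero finite divisor  -> result equals the dividend
// All other cases are delegated to fmod().
static double EcmaModulo(double x, double y) {
  if (std::isfinite(x) && std::isinf(y)) return x;
  if (x == 0.0 && y != 0.0 && std::isfinite(y)) return x;
  return std::fmod(x, y);
}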
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This module contains the platform-specific code. This make the rest of the // code less dependent on operating system, compilers and runtime libraries. @@ -48,7 +25,7 @@ #include "platform/mutex.h" #include "platform/semaphore.h" -#include "utils.h" +#include "vector.h" #include "v8globals.h" #ifdef __sun @@ -96,15 +73,6 @@ inline int lrint(double flt) { namespace v8 { namespace internal { -double modulo(double x, double y); - -// Custom implementation of math functions. -double fast_exp(double input); -double fast_sqrt(double input); -// The custom exp implementation needs 16KB of lookup data; initialize it -// on demand. -void lazily_initialize_fast_exp(); - // ---------------------------------------------------------------------------- // Fast TLS support diff --git a/deps/v8/src/platform/condition-variable.cc b/deps/v8/src/platform/condition-variable.cc index 83c35d4b1..8e4d16a29 100644 --- a/deps/v8/src/platform/condition-variable.cc +++ b/deps/v8/src/platform/condition-variable.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "platform/condition-variable.h" diff --git a/deps/v8/src/platform/condition-variable.h b/deps/v8/src/platform/condition-variable.h index 4d8a88aee..eb357beb3 100644 --- a/deps/v8/src/platform/condition-variable.h +++ b/deps/v8/src/platform/condition-variable.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PLATFORM_CONDITION_VARIABLE_H_ #define V8_PLATFORM_CONDITION_VARIABLE_H_ diff --git a/deps/v8/src/platform/elapsed-timer.h b/deps/v8/src/platform/elapsed-timer.h index b61b00760..b25ff2040 100644 --- a/deps/v8/src/platform/elapsed-timer.h +++ b/deps/v8/src/platform/elapsed-timer.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PLATFORM_ELAPSED_TIMER_H_ #define V8_PLATFORM_ELAPSED_TIMER_H_ diff --git a/deps/v8/src/platform/mutex.cc b/deps/v8/src/platform/mutex.cc index ff4a8a379..4e9fb989b 100644 --- a/deps/v8/src/platform/mutex.cc +++ b/deps/v8/src/platform/mutex.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "platform/mutex.h" diff --git a/deps/v8/src/platform/mutex.h b/deps/v8/src/platform/mutex.h index 125e9d486..4abf7f71c 100644 --- a/deps/v8/src/platform/mutex.h +++ b/deps/v8/src/platform/mutex.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. 
All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PLATFORM_MUTEX_H_ #define V8_PLATFORM_MUTEX_H_ diff --git a/deps/v8/src/platform/semaphore.cc b/deps/v8/src/platform/semaphore.cc index 0b82d4ad5..eae47cb36 100644 --- a/deps/v8/src/platform/semaphore.cc +++ b/deps/v8/src/platform/semaphore.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "platform/semaphore.h" diff --git a/deps/v8/src/platform/semaphore.h b/deps/v8/src/platform/semaphore.h index 0babe5fd6..434419204 100644 --- a/deps/v8/src/platform/semaphore.h +++ b/deps/v8/src/platform/semaphore.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PLATFORM_SEMAPHORE_H_ #define V8_PLATFORM_SEMAPHORE_H_ diff --git a/deps/v8/src/platform/socket.cc b/deps/v8/src/platform/socket.cc index 9d56cc79a..f4b33873b 100644 --- a/deps/v8/src/platform/socket.cc +++ b/deps/v8/src/platform/socket.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "platform/socket.h" diff --git a/deps/v8/src/platform/socket.h b/deps/v8/src/platform/socket.h index ff8c1de7c..8605ae0fa 100644 --- a/deps/v8/src/platform/socket.h +++ b/deps/v8/src/platform/socket.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PLATFORM_SOCKET_H_ #define V8_PLATFORM_SOCKET_H_ diff --git a/deps/v8/src/platform/time.cc b/deps/v8/src/platform/time.cc index 5374af802..c6b5786a6 100644 --- a/deps/v8/src/platform/time.cc +++ b/deps/v8/src/platform/time.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "platform/time.h" diff --git a/deps/v8/src/platform/time.h b/deps/v8/src/platform/time.h index 99a8d3931..8e35b8791 100644 --- a/deps/v8/src/platform/time.h +++ b/deps/v8/src/platform/time.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PLATFORM_TIME_H_ #define V8_PLATFORM_TIME_H_ diff --git a/deps/v8/src/preparse-data-format.h b/deps/v8/src/preparse-data-format.h index e2cf0a1a3..4d1ad7abb 100644 --- a/deps/v8/src/preparse-data-format.h +++ b/deps/v8/src/preparse-data-format.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PREPARSE_DATA_FORMAT_H_ #define V8_PREPARSE_DATA_FORMAT_H_ @@ -37,21 +14,21 @@ struct PreparseDataConstants { public: // Layout and constants of the preparse data exchange format. static const unsigned kMagicNumber = 0xBadDead; - static const unsigned kCurrentVersion = 8; + static const unsigned kCurrentVersion = 9; static const int kMagicOffset = 0; static const int kVersionOffset = 1; static const int kHasErrorOffset = 2; static const int kFunctionsSizeOffset = 3; - static const int kSymbolCountOffset = 4; - static const int kSizeOffset = 5; - static const int kHeaderSize = 6; + static const int kSizeOffset = 4; + static const int kHeaderSize = 5; // If encoding a message, the following positions are fixed. static const int kMessageStartPos = 0; static const int kMessageEndPos = 1; static const int kMessageArgCountPos = 2; - static const int kMessageTextPos = 3; + static const int kIsReferenceErrorPos = 3; + static const int kMessageTextPos = 4; static const unsigned char kNumberTerminator = 0x80u; }; diff --git a/deps/v8/src/preparse-data.cc b/deps/v8/src/preparse-data.cc index 9f585a991..5ddf72137 100644 --- a/deps/v8/src/preparse-data.cc +++ b/deps/v8/src/preparse-data.cc @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "../include/v8stdint.h" @@ -38,59 +15,27 @@ namespace v8 { namespace internal { -template <typename Char> -static int vector_hash(Vector<const Char> string) { - int hash = 0; - for (int i = 0; i < string.length(); i++) { - int c = static_cast<int>(string[i]); - hash += c; - hash += (hash << 10); - hash ^= (hash >> 6); - } - return hash; -} - - -static bool vector_compare(void* a, void* b) { - CompleteParserRecorder::Key* string1 = - reinterpret_cast<CompleteParserRecorder::Key*>(a); - CompleteParserRecorder::Key* string2 = - reinterpret_cast<CompleteParserRecorder::Key*>(b); - if (string1->is_one_byte != string2->is_one_byte) return false; - int length = string1->literal_bytes.length(); - if (string2->literal_bytes.length() != length) return false; - return memcmp(string1->literal_bytes.start(), - string2->literal_bytes.start(), length) == 0; -} - - CompleteParserRecorder::CompleteParserRecorder() - : function_store_(0), - literal_chars_(0), - symbol_store_(0), - symbol_keys_(0), - string_table_(vector_compare), - symbol_id_(0) { + : function_store_(0) { preamble_[PreparseDataConstants::kMagicOffset] = PreparseDataConstants::kMagicNumber; preamble_[PreparseDataConstants::kVersionOffset] = PreparseDataConstants::kCurrentVersion; preamble_[PreparseDataConstants::kHasErrorOffset] = false; preamble_[PreparseDataConstants::kFunctionsSizeOffset] = 0; - preamble_[PreparseDataConstants::kSymbolCountOffset] = 0; preamble_[PreparseDataConstants::kSizeOffset] = 0; - ASSERT_EQ(6, PreparseDataConstants::kHeaderSize); + ASSERT_EQ(5, PreparseDataConstants::kHeaderSize); #ifdef DEBUG prev_start_ = -1; #endif - should_log_symbols_ = true; } void CompleteParserRecorder::LogMessage(int start_pos, - int end_pos, - const char* message, - const char* arg_opt) { + int end_pos, + const char* message, + const char* arg_opt, + bool is_reference_error) { if (has_error()) return; preamble_[PreparseDataConstants::kHasErrorOffset] = true; function_store_.Reset(); @@ -100,10 +45,11 @@ void CompleteParserRecorder::LogMessage(int start_pos, function_store_.Add(end_pos); STATIC_ASSERT(PreparseDataConstants::kMessageArgCountPos == 2); function_store_.Add((arg_opt == NULL) ? 0 : 1); - STATIC_ASSERT(PreparseDataConstants::kMessageTextPos == 3); + STATIC_ASSERT(PreparseDataConstants::kIsReferenceErrorPos == 3); + function_store_.Add(is_reference_error ? 
1 : 0); + STATIC_ASSERT(PreparseDataConstants::kMessageTextPos == 4); WriteString(CStrVector(message)); if (arg_opt != NULL) WriteString(CStrVector(arg_opt)); - should_log_symbols_ = false; } @@ -115,92 +61,18 @@ void CompleteParserRecorder::WriteString(Vector<const char> str) { } -void CompleteParserRecorder::LogOneByteSymbol(int start, - Vector<const uint8_t> literal) { - ASSERT(should_log_symbols_); - int hash = vector_hash(literal); - LogSymbol(start, hash, true, literal); -} - - -void CompleteParserRecorder::LogTwoByteSymbol(int start, - Vector<const uint16_t> literal) { - ASSERT(should_log_symbols_); - int hash = vector_hash(literal); - LogSymbol(start, hash, false, Vector<const byte>::cast(literal)); -} - - -void CompleteParserRecorder::LogSymbol(int start, - int hash, - bool is_one_byte, - Vector<const byte> literal_bytes) { - Key key = { is_one_byte, literal_bytes }; - HashMap::Entry* entry = string_table_.Lookup(&key, hash, true); - int id = static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); - if (id == 0) { - // Copy literal contents for later comparison. - key.literal_bytes = - Vector<const byte>::cast(literal_chars_.AddBlock(literal_bytes)); - // Put (symbol_id_ + 1) into entry and increment it. - id = ++symbol_id_; - entry->value = reinterpret_cast<void*>(id); - Vector<Key> symbol = symbol_keys_.AddBlock(1, key); - entry->key = &symbol[0]; - } - WriteNumber(id - 1); -} - - Vector<unsigned> CompleteParserRecorder::ExtractData() { int function_size = function_store_.size(); - // Add terminator to symbols, then pad to unsigned size. - int symbol_size = symbol_store_.size(); - int padding = sizeof(unsigned) - (symbol_size % sizeof(unsigned)); - symbol_store_.AddBlock(padding, PreparseDataConstants::kNumberTerminator); - symbol_size += padding; - int total_size = PreparseDataConstants::kHeaderSize + function_size - + (symbol_size / sizeof(unsigned)); + int total_size = PreparseDataConstants::kHeaderSize + function_size; Vector<unsigned> data = Vector<unsigned>::New(total_size); preamble_[PreparseDataConstants::kFunctionsSizeOffset] = function_size; - preamble_[PreparseDataConstants::kSymbolCountOffset] = symbol_id_; OS::MemCopy(data.start(), preamble_, sizeof(preamble_)); - int symbol_start = PreparseDataConstants::kHeaderSize + function_size; if (function_size > 0) { function_store_.WriteTo(data.SubVector(PreparseDataConstants::kHeaderSize, - symbol_start)); - } - if (!has_error()) { - symbol_store_.WriteTo( - Vector<byte>::cast(data.SubVector(symbol_start, total_size))); + total_size)); } return data; } -void CompleteParserRecorder::WriteNumber(int number) { - // Split the number into chunks of 7 bits. Write them one after another (the - // most significant first). Use the MSB of each byte for signalling that the - // number continues. See ScriptDataImpl::ReadNumber for the reading side. - ASSERT(number >= 0); - - int mask = (1 << 28) - 1; - int i = 28; - // 26 million symbols ought to be enough for anybody. - ASSERT(number <= mask); - while (number < mask) { - mask >>= 7; - i -= 7; - } - while (i > 0) { - symbol_store_.Add(static_cast<byte>(number >> i) | 0x80u); - number &= mask; - mask >>= 7; - i -= 7; - } - ASSERT(number < (1 << 7)); - symbol_store_.Add(static_cast<byte>(number)); -} - - } } // namespace v8::internal. diff --git a/deps/v8/src/preparse-data.h b/deps/v8/src/preparse-data.h index 6a968e3b2..051a90969 100644 --- a/deps/v8/src/preparse-data.h +++ b/deps/v8/src/preparse-data.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PREPARSE_DATA_H_ #define V8_PREPARSE_DATA_H_ @@ -39,7 +16,7 @@ namespace internal { // Abstract interface for preparse data recorder. class ParserRecorder { public: - ParserRecorder() : should_log_symbols_(false) { } + ParserRecorder() { } virtual ~ParserRecorder() { } // Logs the scope and some details of a function literal in the source. @@ -55,24 +32,8 @@ class ParserRecorder { virtual void LogMessage(int start, int end, const char* message, - const char* argument_opt) = 0; - - // Logs a symbol creation of a literal or identifier. - bool ShouldLogSymbols() { return should_log_symbols_; } - // The following functions are only callable on CompleteParserRecorder - // and are guarded by calls to ShouldLogSymbols. 
- virtual void LogOneByteSymbol(int start, Vector<const uint8_t> literal) { - UNREACHABLE(); - } - virtual void LogTwoByteSymbol(int start, Vector<const uint16_t> literal) { - UNREACHABLE(); - } - virtual void PauseRecording() { UNREACHABLE(); } - virtual void ResumeRecording() { UNREACHABLE(); } - - protected: - bool should_log_symbols_; - + const char* argument_opt, + bool is_reference_error) = 0; private: DISALLOW_COPY_AND_ASSIGN(ParserRecorder); }; @@ -80,8 +41,9 @@ class ParserRecorder { class SingletonLogger : public ParserRecorder { public: - SingletonLogger() : has_error_(false), start_(-1), end_(-1) { } - virtual ~SingletonLogger() { } + SingletonLogger() + : has_error_(false), start_(-1), end_(-1), is_reference_error_(false) {} + virtual ~SingletonLogger() {} void Reset() { has_error_ = false; } @@ -104,36 +66,39 @@ class SingletonLogger : public ParserRecorder { virtual void LogMessage(int start, int end, const char* message, - const char* argument_opt) { + const char* argument_opt, + bool is_reference_error) { if (has_error_) return; has_error_ = true; start_ = start; end_ = end; message_ = message; argument_opt_ = argument_opt; + is_reference_error_ = is_reference_error; } - bool has_error() { return has_error_; } + bool has_error() const { return has_error_; } - int start() { return start_; } - int end() { return end_; } - int literals() { + int start() const { return start_; } + int end() const { return end_; } + int literals() const { ASSERT(!has_error_); return literals_; } - int properties() { + int properties() const { ASSERT(!has_error_); return properties_; } - StrictMode strict_mode() { + StrictMode strict_mode() const { ASSERT(!has_error_); return strict_mode_; } + int is_reference_error() const { return is_reference_error_; } const char* message() { ASSERT(has_error_); return message_; } - const char* argument_opt() { + const char* argument_opt() const { ASSERT(has_error_); return argument_opt_; } @@ -149,6 +114,7 @@ class SingletonLogger : public ParserRecorder { // For error messages. const char* message_; const char* argument_opt_; + bool is_reference_error_; }; @@ -180,20 +146,8 @@ class CompleteParserRecorder : public ParserRecorder { virtual void LogMessage(int start, int end, const char* message, - const char* argument_opt); - - virtual void PauseRecording() { - ASSERT(should_log_symbols_); - should_log_symbols_ = false; - } - - virtual void ResumeRecording() { - ASSERT(!should_log_symbols_); - should_log_symbols_ = !has_error(); - } - - virtual void LogOneByteSymbol(int start, Vector<const uint8_t> literal); - virtual void LogTwoByteSymbol(int start, Vector<const uint16_t> literal); + const char* argument_opt, + bool is_reference_error_); Vector<unsigned> ExtractData(); private: @@ -203,14 +157,6 @@ class CompleteParserRecorder : public ParserRecorder { void WriteString(Vector<const char> str); - // For testing. Defined in test-parsing.cc. - friend struct CompleteParserRecorderFriend; - - void LogSymbol(int start, - int hash, - bool is_one_byte, - Vector<const byte> literal); - // Write a non-negative number to the symbol store. 
void WriteNumber(int number); @@ -220,12 +166,6 @@ class CompleteParserRecorder : public ParserRecorder { #ifdef DEBUG int prev_start_; #endif - - Collector<byte> literal_chars_; - Collector<byte> symbol_store_; - Collector<Key> symbol_keys_; - HashMap string_table_; - int symbol_id_; }; diff --git a/deps/v8/src/preparser.cc b/deps/v8/src/preparser.cc index 9bcc88002..96623b952 100644 --- a/deps/v8/src/preparser.cc +++ b/deps/v8/src/preparser.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
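The preparse-data hunks above bump kCurrentVersion from 8 to 9: the per-script symbol table (vector_hash, LogSymbol, WriteNumber and the symbol_store_/string_table_ members) disappears, the fixed header shrinks from 6 to 5 words, and error records gain an is_reference_error slot. As a rough illustration of the resulting layout — DecodeHeader, PreparseHeader and PreparseMessage below are invented names; only the offsets and the magic/version values come from the diff:

// Illustrative decoder for the version-9 preparse data layout shown above.
#include <vector>

struct PreparseHeader {
  bool valid;
  bool has_error;
  unsigned functions_size;
  unsigned size;
};

PreparseHeader DecodeHeader(const std::vector<unsigned>& data) {
  PreparseHeader h = {false, false, 0, 0};
  if (data.size() < 5) return h;        // kHeaderSize == 5 after this change
  if (data[0] != 0xBadDeadu) return h;  // kMagicOffset / kMagicNumber
  if (data[1] != 9) return h;           // kVersionOffset / kCurrentVersion
  h.valid = true;
  h.has_error = data[2] != 0;           // kHasErrorOffset
  h.functions_size = data[3];           // kFunctionsSizeOffset
  h.size = data[4];                     // kSizeOffset
  return h;
}

// When has_error is set, the function store is reused for one message record:
// [start, end, arg_count, is_reference_error, message text...]
struct PreparseMessage {
  int start;
  int end;
  int arg_count;
  bool is_reference_error;  // new in version 9 (kIsReferenceErrorPos == 3)
};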
#include <cmath> @@ -56,17 +33,6 @@ namespace v8 { namespace internal { -void PreParserTraits::CheckStrictModeLValue(PreParserExpression expression, - bool* ok) { - if (expression.IsIdentifier() && - expression.AsIdentifier().IsEvalOrArguments()) { - pre_parser_->ReportMessage("strict_eval_arguments", - Vector<const char*>::empty()); - *ok = false; - } -} - - void PreParserTraits::ReportMessageAt(Scanner::Location location, const char* message, Vector<const char*> args, @@ -83,8 +49,8 @@ void PreParserTraits::ReportMessageAt(Scanner::Location location, const char* type, const char* name_opt, bool is_reference_error) { - pre_parser_->log_ - ->LogMessage(location.beg_pos, location.end_pos, type, name_opt); + pre_parser_->log_->LogMessage(location.beg_pos, location.end_pos, type, + name_opt, is_reference_error); } @@ -93,12 +59,12 @@ void PreParserTraits::ReportMessageAt(int start_pos, const char* type, const char* name_opt, bool is_reference_error) { - pre_parser_->log_->LogMessage(start_pos, end_pos, type, name_opt); + pre_parser_->log_->LogMessage(start_pos, end_pos, type, name_opt, + is_reference_error); } PreParserIdentifier PreParserTraits::GetSymbol(Scanner* scanner) { - pre_parser_->LogSymbol(); if (scanner->current_token() == Token::FUTURE_RESERVED_WORD) { return PreParserIdentifier::FutureReserved(); } else if (scanner->current_token() == @@ -119,7 +85,6 @@ PreParserIdentifier PreParserTraits::GetSymbol(Scanner* scanner) { PreParserExpression PreParserTraits::ExpressionFromString( int pos, Scanner* scanner, PreParserFactory* factory) { - pre_parser_->LogSymbol(); if (scanner->UnescapedLiteralMatches("use strict", 10)) { return PreParserExpression::UseStrictStringLiteral(); } @@ -592,7 +557,7 @@ PreParser::Statement PreParser::ParseReturnStatement(bool* ok) { // ReturnStatement :: // 'return' [no line terminator] Expression? ';' - // Consume the return token. It is necessary to do the before + // Consume the return token. It is necessary to do before // reporting any errors on it, because of the way errors are // reported (underlining). Expect(Token::RETURN, CHECK_OK); @@ -942,10 +907,7 @@ PreParser::Expression PreParser::ParseFunctionLiteral( void PreParser::ParseLazyFunctionLiteralBody(bool* ok) { int body_start = position(); - bool is_logging = log_->ShouldLogSymbols(); - if (is_logging) log_->PauseRecording(); ParseSourceElements(Token::RBRACE, ok); - if (is_logging) log_->ResumeRecording(); if (!*ok) return; // Position right after terminal '}'. @@ -976,11 +938,4 @@ PreParser::Expression PreParser::ParseV8Intrinsic(bool* ok) { #undef CHECK_OK -void PreParser::LogSymbol() { - if (log_->ShouldLogSymbols()) { - scanner()->LogSymbol(log_, position()); - } -} - - } } // v8::internal diff --git a/deps/v8/src/preparser.h b/deps/v8/src/preparser.h index 080b77287..673365857 100644 --- a/deps/v8/src/preparser.h +++ b/deps/v8/src/preparser.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PREPARSER_H #define V8_PREPARSER_H @@ -228,8 +205,8 @@ class ParserBase : public Traits { FunctionState* outer_function_state_; typename Traits::Type::Scope** scope_stack_; typename Traits::Type::Scope* outer_scope_; - Isolate* isolate_; // Only used by ParserTraits. int saved_ast_node_id_; // Only used by ParserTraits. + typename Traits::Type::Zone* extra_param_; typename Traits::Type::Factory factory_; friend class ParserTraits; @@ -425,6 +402,13 @@ class ParserBase : public Traits { ExpressionT ParseMemberExpressionContinuation(ExpressionT expression, bool* ok); + // Checks if the expression is a valid reference expression (e.g., on the + // left-hand side of assignments). Although ruled out by ECMA as early errors, + // we allow calls for web compatibility and rewrite them to a runtime throw. + ExpressionT CheckAndRewriteReferenceExpression( + ExpressionT expression, + Scanner::Location location, const char* message, bool* ok); + // Used to detect duplicates in object literals. Each of the values // kGetterProperty, kSetterProperty and kValueProperty represents // a type of object literal property. When parsing a property, its @@ -589,10 +573,14 @@ class PreParserExpression { return PreParserExpression(kPropertyExpression); } + static PreParserExpression Call() { + return PreParserExpression(kCallExpression); + } + bool IsIdentifier() { return (code_ & kIdentifierFlag) != 0; } - // Only works corretly if it is actually an identifier expression. PreParserIdentifier AsIdentifier() { + ASSERT(IsIdentifier()); return PreParserIdentifier( static_cast<PreParserIdentifier::Type>(code_ >> kIdentifierShift)); } @@ -611,13 +599,14 @@ class PreParserExpression { return code_ == kPropertyExpression || code_ == kThisPropertyExpression; } - bool IsValidLeftHandSide() { + bool IsCall() { return code_ == kCallExpression; } + + bool IsValidReferenceExpression() { return IsIdentifier() || IsProperty(); } // At the moment PreParser doesn't track these expression types. bool IsFunctionLiteral() const { return false; } - bool IsCall() const { return false; } bool IsCallNew() const { return false; } PreParserExpression AsFunctionLiteral() { return *this; } @@ -651,7 +640,8 @@ class PreParserExpression { // 2 least significant bits for flags. 
kThisExpression = 1 << 2, kThisPropertyExpression = 2 << 2, - kPropertyExpression = 3 << 2 + kPropertyExpression = 3 << 2, + kCallExpression = 4 << 2 }; explicit PreParserExpression(int expression_code) : code_(expression_code) {} @@ -674,6 +664,67 @@ class PreParserExpressionList { }; +class PreParserStatement { + public: + static PreParserStatement Default() { + return PreParserStatement(kUnknownStatement); + } + + static PreParserStatement FunctionDeclaration() { + return PreParserStatement(kFunctionDeclaration); + } + + // Creates expression statement from expression. + // Preserves being an unparenthesized string literal, possibly + // "use strict". + static PreParserStatement ExpressionStatement( + PreParserExpression expression) { + if (expression.IsUseStrictLiteral()) { + return PreParserStatement(kUseStrictExpressionStatement); + } + if (expression.IsStringLiteral()) { + return PreParserStatement(kStringLiteralExpressionStatement); + } + return Default(); + } + + bool IsStringLiteral() { + return code_ == kStringLiteralExpressionStatement; + } + + bool IsUseStrictLiteral() { + return code_ == kUseStrictExpressionStatement; + } + + bool IsFunctionDeclaration() { + return code_ == kFunctionDeclaration; + } + + private: + enum Type { + kUnknownStatement, + kStringLiteralExpressionStatement, + kUseStrictExpressionStatement, + kFunctionDeclaration + }; + + explicit PreParserStatement(Type code) : code_(code) {} + Type code_; +}; + + + +// PreParserStatementList doesn't actually store the statements because +// the PreParser does not need them. +class PreParserStatementList { + public: + // These functions make list->Add(some_expression) work as no-ops. + PreParserStatementList() {} + PreParserStatementList* operator->() { return this; } + void Add(PreParserStatement, void*) {} +}; + + class PreParserScope { public: explicit PreParserScope(PreParserScope* outer_scope, ScopeType scope_type) @@ -782,7 +833,7 @@ class PreParserFactory { PreParserExpression NewCall(PreParserExpression expression, PreParserExpressionList arguments, int pos) { - return PreParserExpression::Default(); + return PreParserExpression::Call(); } PreParserExpression NewCallNew(PreParserExpression expression, PreParserExpressionList arguments, @@ -817,6 +868,7 @@ class PreParserTraits { typedef PreParserExpression Literal; typedef PreParserExpressionList ExpressionList; typedef PreParserExpressionList PropertyList; + typedef PreParserStatementList StatementList; // For constructing objects returned by the traversing functions. typedef PreParserFactory Factory; @@ -829,7 +881,7 @@ class PreParserTraits { template<typename FunctionState> static void SetUpFunctionState(FunctionState* function_state, void*) {} template<typename FunctionState> - static void TearDownFunctionState(FunctionState* function_state) {} + static void TearDownFunctionState(FunctionState* function_state, void*) {} // Helper functions for recursive descent. static bool IsEvalOrArguments(PreParserIdentifier identifier) { @@ -845,6 +897,10 @@ class PreParserTraits { return expression.IsIdentifier(); } + static PreParserIdentifier AsIdentifier(PreParserExpression expression) { + return expression.AsIdentifier(); + } + static bool IsBoilerplateProperty(PreParserExpression property) { // PreParser doesn't count boilerplate properties. return false; @@ -883,10 +939,6 @@ class PreParserTraits { return expression; } - // Checks LHS expression for assignment and prefix/postfix increment/decrement - // in strict mode. 
- void CheckStrictModeLValue(PreParserExpression expression, bool* ok); - bool ShortcutNumericLiteralBinaryExpression(PreParserExpression* x, PreParserExpression y, Token::Value op, @@ -901,6 +953,18 @@ class PreParserTraits { return PreParserExpression::Default(); } + PreParserExpression NewThrowReferenceError(const char* type, int pos) { + return PreParserExpression::Default(); + } + PreParserExpression NewThrowSyntaxError( + const char* type, Handle<Object> arg, int pos) { + return PreParserExpression::Default(); + } + PreParserExpression NewThrowTypeError( + const char* type, Handle<Object> arg, int pos) { + return PreParserExpression::Default(); + } + // Reporting errors. void ReportMessageAt(Scanner::Location location, const char* message, @@ -968,6 +1032,10 @@ class PreParserTraits { return PreParserExpressionList(); } + static PreParserStatementList NewStatementList(int size, void* zone) { + return PreParserStatementList(); + } + static PreParserExpressionList NewPropertyList(int size, void* zone) { return PreParserExpressionList(); } @@ -1004,6 +1072,7 @@ class PreParser : public ParserBase<PreParserTraits> { public: typedef PreParserIdentifier Identifier; typedef PreParserExpression Expression; + typedef PreParserStatement Statement; enum PreParseResult { kPreParseStackOverflow, @@ -1065,52 +1134,6 @@ class PreParser : public ParserBase<PreParserTraits> { kHasNoInitializers }; - class Statement { - public: - static Statement Default() { - return Statement(kUnknownStatement); - } - - static Statement FunctionDeclaration() { - return Statement(kFunctionDeclaration); - } - - // Creates expression statement from expression. - // Preserves being an unparenthesized string literal, possibly - // "use strict". - static Statement ExpressionStatement(Expression expression) { - if (expression.IsUseStrictLiteral()) { - return Statement(kUseStrictExpressionStatement); - } - if (expression.IsStringLiteral()) { - return Statement(kStringLiteralExpressionStatement); - } - return Default(); - } - - bool IsStringLiteral() { - return code_ == kStringLiteralExpressionStatement; - } - - bool IsUseStrictLiteral() { - return code_ == kUseStrictExpressionStatement; - } - - bool IsFunctionDeclaration() { - return code_ == kFunctionDeclaration; - } - - private: - enum Type { - kUnknownStatement, - kStringLiteralExpressionStatement, - kUseStrictExpressionStatement, - kFunctionDeclaration - }; - - explicit Statement(Type code) : code_(code) {} - Type code_; - }; enum SourceElements { kUnknownSourceElements @@ -1158,11 +1181,6 @@ class PreParser : public ParserBase<PreParserTraits> { bool* ok); void ParseLazyFunctionLiteralBody(bool* ok); - // Logs the currently parsed literal as a symbol in the preparser data. - void LogSymbol(); - // Log the currently parsed string literal. 
- Expression GetStringSymbol(); - bool CheckInOrOf(bool accept_OF); }; @@ -1181,8 +1199,8 @@ ParserBase<Traits>::FunctionState::FunctionState( outer_function_state_(*function_state_stack), scope_stack_(scope_stack), outer_scope_(*scope_stack), - isolate_(NULL), saved_ast_node_id_(0), + extra_param_(extra_param), factory_(extra_param) { *scope_stack_ = scope; *function_state_stack = this; @@ -1194,7 +1212,7 @@ template<class Traits> ParserBase<Traits>::FunctionState::~FunctionState() { *scope_stack_ = outer_scope_; *function_state_stack_ = outer_function_state_; - Traits::TearDownFunctionState(this); + Traits::TearDownFunctionState(this, extra_param_); } @@ -1695,16 +1713,8 @@ ParserBase<Traits>::ParseAssignmentExpression(bool accept_IN, bool* ok) { return expression; } - if (!expression->IsValidLeftHandSide()) { - this->ReportMessageAt(lhs_location, "invalid_lhs_in_assignment", true); - *ok = false; - return this->EmptyExpression(); - } - - if (strict_mode() == STRICT) { - // Assignment to eval or arguments is disallowed in strict mode. - this->CheckStrictModeLValue(expression, CHECK_OK); - } + expression = this->CheckAndRewriteReferenceExpression( + expression, lhs_location, "invalid_lhs_in_assignment", CHECK_OK); expression = this->MarkExpressionAsLValue(expression); Token::Value op = Next(); // Get assignment operator. @@ -1864,17 +1874,9 @@ ParserBase<Traits>::ParseUnaryExpression(bool* ok) { } else if (Token::IsCountOp(op)) { op = Next(); Scanner::Location lhs_location = scanner()->peek_location(); - ExpressionT expression = ParseUnaryExpression(CHECK_OK); - if (!expression->IsValidLeftHandSide()) { - ReportMessageAt(lhs_location, "invalid_lhs_in_prefix_op", true); - *ok = false; - return this->EmptyExpression(); - } - - if (strict_mode() == STRICT) { - // Prefix expression operand in strict mode may not be eval or arguments. - this->CheckStrictModeLValue(expression, CHECK_OK); - } + ExpressionT expression = this->ParseUnaryExpression(CHECK_OK); + expression = this->CheckAndRewriteReferenceExpression( + expression, lhs_location, "invalid_lhs_in_prefix_op", CHECK_OK); this->MarkExpressionAsLValue(expression); return factory()->NewCountOperation(op, @@ -1898,16 +1900,8 @@ ParserBase<Traits>::ParsePostfixExpression(bool* ok) { ExpressionT expression = this->ParseLeftHandSideExpression(CHECK_OK); if (!scanner()->HasAnyLineTerminatorBeforeNext() && Token::IsCountOp(peek())) { - if (!expression->IsValidLeftHandSide()) { - ReportMessageAt(lhs_location, "invalid_lhs_in_postfix_op", true); - *ok = false; - return this->EmptyExpression(); - } - - if (strict_mode() == STRICT) { - // Postfix expression operand in strict mode may not be eval or arguments. 
- this->CheckStrictModeLValue(expression, CHECK_OK); - } + expression = this->CheckAndRewriteReferenceExpression( + expression, lhs_location, "invalid_lhs_in_postfix_op", CHECK_OK); expression = this->MarkExpressionAsLValue(expression); Token::Value next = Next(); @@ -2051,7 +2045,7 @@ ParserBase<Traits>::ParseMemberExpression(bool* ok) { Consume(Token::FUNCTION); int function_token_position = position(); bool is_generator = allow_generators() && Check(Token::MUL); - IdentifierT name; + IdentifierT name = this->EmptyIdentifier(); bool is_strict_reserved_name = false; Scanner::Location function_name_location = Scanner::Location::invalid(); FunctionLiteral::FunctionType function_type = @@ -2117,6 +2111,32 @@ ParserBase<Traits>::ParseMemberExpressionContinuation(ExpressionT expression, } +template <typename Traits> +typename ParserBase<Traits>::ExpressionT +ParserBase<Traits>::CheckAndRewriteReferenceExpression( + ExpressionT expression, + Scanner::Location location, const char* message, bool* ok) { + if (strict_mode() == STRICT && this->IsIdentifier(expression) && + this->IsEvalOrArguments(this->AsIdentifier(expression))) { + this->ReportMessageAt(location, "strict_eval_arguments", false); + *ok = false; + return this->EmptyExpression(); + } else if (expression->IsValidReferenceExpression()) { + return expression; + } else if (expression->IsCall()) { + // If it is a call, make it a runtime error for legacy web compatibility. + // Rewrite `expr' to `expr[throw ReferenceError]'. + int pos = location.beg_pos; + ExpressionT error = this->NewThrowReferenceError(message, pos); + return factory()->NewProperty(expression, error, pos); + } else { + this->ReportMessageAt(location, message, true); + *ok = false; + return this->EmptyExpression(); + } +} + + #undef CHECK_OK #undef CHECK_OK_CUSTOM diff --git a/deps/v8/src/prettyprinter.cc b/deps/v8/src/prettyprinter.cc index 130fec1b2..233d7c2fa 100644 --- a/deps/v8/src/prettyprinter.cc +++ b/deps/v8/src/prettyprinter.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdarg.h> diff --git a/deps/v8/src/prettyprinter.h b/deps/v8/src/prettyprinter.h index a792720fe..e0467ca82 100644 --- a/deps/v8/src/prettyprinter.h +++ b/deps/v8/src/prettyprinter.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PRETTYPRINTER_H_ #define V8_PRETTYPRINTER_H_ diff --git a/deps/v8/src/profile-generator-inl.h b/deps/v8/src/profile-generator-inl.h index 9aeb8f5c2..9e8408d91 100644 --- a/deps/v8/src/profile-generator-inl.h +++ b/deps/v8/src/profile-generator-inl.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PROFILE_GENERATOR_INL_H_ #define V8_PROFILE_GENERATOR_INL_H_ diff --git a/deps/v8/src/profile-generator.cc b/deps/v8/src/profile-generator.cc index 6bd446e0c..957f5dbae 100644 --- a/deps/v8/src/profile-generator.cc +++ b/deps/v8/src/profile-generator.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
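The preparser hunks above fold the old per-site IsValidLeftHandSide/CheckStrictModeLValue checks into a single CheckAndRewriteReferenceExpression helper: strict-mode writes to eval/arguments are reported immediately, identifiers and property accesses pass through unchanged, and call expressions are kept for web compatibility but rewritten into a property load whose key is a thrown ReferenceError, so something like `f() = x` fails at runtime rather than at parse time. A simplified, non-templated sketch of that decision logic — Expression, ExprKind and Result are stand-ins for the templated ExpressionT/traits machinery in the real code:

// Simplified sketch of the reference-expression check added in the diff.
#include <string>

enum class ExprKind { kIdentifier, kProperty, kCall, kOther };

struct Expression {
  ExprKind kind;
  std::string name;  // only meaningful for identifiers
};

struct Result {
  bool ok;
  Expression expr;
  std::string error;  // non-empty when ok == false
};

Result CheckAndRewriteReference(const Expression& expr, bool strict_mode,
                                const std::string& message) {
  if (strict_mode && expr.kind == ExprKind::kIdentifier &&
      (expr.name == "eval" || expr.name == "arguments")) {
    // Early error: strict code may not assign to eval/arguments.
    return {false, expr, "strict_eval_arguments"};
  }
  if (expr.kind == ExprKind::kIdentifier || expr.kind == ExprKind::kProperty) {
    return {true, expr, ""};  // already a valid reference expression
  }
  if (expr.kind == ExprKind::kCall) {
    // Legacy web compatibility: keep parsing, but make the assignment throw a
    // ReferenceError at runtime by rewriting it to expr[throw ReferenceError].
    Expression rewritten{ExprKind::kProperty, "<throw ReferenceError>"};
    return {true, rewritten, ""};
  }
  return {false, expr, message};  // e.g. "invalid_lhs_in_assignment"
}

In the real parser the rewrite goes through factory()->NewProperty(expression, NewThrowReferenceError(message, pos), pos), exactly as the hunk above shows.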
#include "v8.h" @@ -355,19 +332,21 @@ void ProfileTree::TraverseDepthFirst(Callback* callback) { CpuProfile::CpuProfile(const char* title, bool record_samples) : title_(title), record_samples_(record_samples), - start_time_(Time::NowFromSystemTime()) { - timer_.Start(); + start_time_(TimeTicks::HighResolutionNow()) { } -void CpuProfile::AddPath(const Vector<CodeEntry*>& path) { +void CpuProfile::AddPath(TimeTicks timestamp, const Vector<CodeEntry*>& path) { ProfileNode* top_frame_node = top_down_.AddPathFromEnd(path); - if (record_samples_) samples_.Add(top_frame_node); + if (record_samples_) { + timestamps_.Add(timestamp); + samples_.Add(top_frame_node); + } } void CpuProfile::CalculateTotalTicksAndSamplingRate() { - end_time_ = start_time_ + timer_.Elapsed(); + end_time_ = TimeTicks::HighResolutionNow(); } @@ -546,13 +525,13 @@ void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) { void CpuProfilesCollection::AddPathToCurrentProfiles( - const Vector<CodeEntry*>& path) { + TimeTicks timestamp, const Vector<CodeEntry*>& path) { // As starting / stopping profiles is rare relatively to this // method, we don't bother minimizing the duration of lock holding, // e.g. copying contents of the list to a local vector. current_profiles_semaphore_.Wait(); for (int i = 0; i < current_profiles_.length(); ++i) { - current_profiles_[i]->AddPath(path); + current_profiles_[i]->AddPath(timestamp, path); } current_profiles_semaphore_.Signal(); } @@ -675,7 +654,7 @@ void ProfileGenerator::RecordTickSample(const TickSample& sample) { } } - profiles_->AddPathToCurrentProfiles(entries); + profiles_->AddPathToCurrentProfiles(sample.timestamp, entries); } diff --git a/deps/v8/src/profile-generator.h b/deps/v8/src/profile-generator.h index 81980bfc9..1d8ad87cf 100644 --- a/deps/v8/src/profile-generator.h +++ b/deps/v8/src/profile-generator.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PROFILE_GENERATOR_H_ #define V8_PROFILE_GENERATOR_H_ @@ -199,7 +176,7 @@ class CpuProfile { CpuProfile(const char* title, bool record_samples); // Add pc -> ... -> main() call path to the profile. - void AddPath(const Vector<CodeEntry*>& path); + void AddPath(TimeTicks timestamp, const Vector<CodeEntry*>& path); void CalculateTotalTicksAndSamplingRate(); const char* title() const { return title_; } @@ -207,9 +184,10 @@ class CpuProfile { int samples_count() const { return samples_.length(); } ProfileNode* sample(int index) const { return samples_.at(index); } + TimeTicks sample_timestamp(int index) const { return timestamps_.at(index); } - Time start_time() const { return start_time_; } - Time end_time() const { return end_time_; } + TimeTicks start_time() const { return start_time_; } + TimeTicks end_time() const { return end_time_; } void UpdateTicksScale(); @@ -218,10 +196,10 @@ class CpuProfile { private: const char* title_; bool record_samples_; - Time start_time_; - Time end_time_; - ElapsedTimer timer_; + TimeTicks start_time_; + TimeTicks end_time_; List<ProfileNode*> samples_; + List<TimeTicks> timestamps_; ProfileTree top_down_; DISALLOW_COPY_AND_ASSIGN(CpuProfile); @@ -306,7 +284,8 @@ class CpuProfilesCollection { int column_number = v8::CpuProfileNode::kNoColumnNumberInfo); // Called from profile generator thread. - void AddPathToCurrentProfiles(const Vector<CodeEntry*>& path); + void AddPathToCurrentProfiles( + TimeTicks timestamp, const Vector<CodeEntry*>& path); // Limits the number of profiles that can be simultaneously collected. static const int kMaxSimultaneousProfiles = 100; diff --git a/deps/v8/src/promise.js b/deps/v8/src/promise.js index 50f91ae0b..fa650eaf0 100644 --- a/deps/v8/src/promise.js +++ b/deps/v8/src/promise.js @@ -1,30 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. "use strict"; @@ -37,14 +13,17 @@ var $Promise = function Promise(resolver) { if (resolver === promiseRaw) return; if (!%_IsConstructCall()) throw MakeTypeError('not_a_promise', [this]); - if (typeof resolver !== 'function') + if (!IS_SPEC_FUNCTION(resolver)) throw MakeTypeError('resolver_not_a_function', [resolver]); var promise = PromiseInit(this); try { + %DebugPromiseHandlePrologue(function() { return promise }); resolver(function(x) { PromiseResolve(promise, x) }, function(r) { PromiseReject(promise, r) }); } catch (e) { PromiseReject(promise, e); + } finally { + %DebugPromiseHandleEpilogue(); } } @@ -172,17 +151,20 @@ function PromiseCatch(onReject) { } function PromiseEnqueue(value, tasks) { - GetMicrotaskQueue().push(function() { + EnqueueMicrotask(function() { for (var i = 0; i < tasks.length; i += 2) { PromiseHandle(value, tasks[i], tasks[i + 1]) } }); - - %SetMicrotaskPending(true); } function PromiseHandle(value, handler, deferred) { try { + %DebugPromiseHandlePrologue( + function() { + var queue = GET_PRIVATE(deferred.promise, promiseOnReject); + return (queue && queue.length == 0) ? deferred.promise : UNDEFINED; + }); var result = handler(value); if (result === deferred.promise) throw MakeTypeError('promise_cyclic', [result]); @@ -190,9 +172,15 @@ function PromiseHandle(value, handler, deferred) { %_CallFunction(result, deferred.resolve, deferred.reject, PromiseChain); else deferred.resolve(result); - } catch(e) { - // TODO(rossberg): perhaps log uncaught exceptions below. - try { deferred.reject(e) } catch(e) {} + } catch (exception) { + try { + %DebugPromiseHandlePrologue(function() { return deferred.promise }); + deferred.reject(exception); + } catch (e) { } finally { + %DebugPromiseHandleEpilogue(); + } + } finally { + %DebugPromiseHandleEpilogue(); } } @@ -200,10 +188,8 @@ function PromiseHandle(value, handler, deferred) { // Multi-unwrapped chaining with thenable coercion. function PromiseThen(onResolve, onReject) { - onResolve = - IS_NULL_OR_UNDEFINED(onResolve) ? PromiseIdResolveHandler : onResolve; - onReject = - IS_NULL_OR_UNDEFINED(onReject) ? PromiseIdRejectHandler : onReject; + onResolve = IS_SPEC_FUNCTION(onResolve) ? onResolve : PromiseIdResolveHandler; + onReject = IS_SPEC_FUNCTION(onReject) ? onReject : PromiseIdRejectHandler; var that = this; var constructor = this.constructor; return %_CallFunction( @@ -324,3 +310,12 @@ function SetUpPromise() { } SetUpPromise(); + +// Functions to expose promise details to the debugger. +function GetPromiseStatus(promise) { + return GET_PRIVATE(promise, promiseStatus); +} + +function GetPromiseValue(promise) { + return GET_PRIVATE(promise, promiseValue); +} diff --git a/deps/v8/src/property-details-inl.h b/deps/v8/src/property-details-inl.h index 98eb1cf58..353f8f587 100644 --- a/deps/v8/src/property-details-inl.h +++ b/deps/v8/src/property-details-inl.h @@ -1,36 +1,13 @@ // Copyright 2014 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_PROPERTY_DETAILS_INL_H_ #define V8_PROPERTY_DETAILS_INL_H_ +#include "conversions.h" #include "objects.h" #include "property-details.h" -#include "v8conversions.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/property-details.h b/deps/v8/src/property-details.h index 01050dbd4..cb33f9c9a 100644 --- a/deps/v8/src/property-details.h +++ b/deps/v8/src/property-details.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
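The profile-generator hunks above move CpuProfile from the Time/ElapsedTimer pair to TimeTicks and record one timestamp per sample, exposed through the new sample_timestamp(index) accessor and threaded through AddPath/AddPathToCurrentProfiles. A minimal sketch of that parallel-lists bookkeeping, using std::chrono::steady_clock as a stand-in for V8's TimeTicks and an opaque ProfileNode:

// Sketch of the "one timestamp per sample" bookkeeping added to CpuProfile.
#include <chrono>
#include <vector>

struct ProfileNode;  // opaque stand-in

class MiniCpuProfile {
 public:
  using TimeTicks = std::chrono::steady_clock::time_point;

  explicit MiniCpuProfile(bool record_samples)
      : record_samples_(record_samples),
        start_time_(std::chrono::steady_clock::now()) {}

  // Mirrors CpuProfile::AddPath(TimeTicks, const Vector<CodeEntry*>&):
  // the sample and its timestamp are appended to parallel lists.
  void AddSample(TimeTicks timestamp, ProfileNode* top_frame_node) {
    if (!record_samples_) return;
    timestamps_.push_back(timestamp);
    samples_.push_back(top_frame_node);
  }

  void Finish() { end_time_ = std::chrono::steady_clock::now(); }

  int samples_count() const { return static_cast<int>(samples_.size()); }
  ProfileNode* sample(int i) const { return samples_[i]; }
  TimeTicks sample_timestamp(int i) const { return timestamps_[i]; }

 private:
  bool record_samples_;
  TimeTicks start_time_;
  TimeTicks end_time_{};
  std::vector<ProfileNode*> samples_;
  std::vector<TimeTicks> timestamps_;
};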
#ifndef V8_PROPERTY_DETAILS_H_ #define V8_PROPERTY_DETAILS_H_ @@ -77,9 +54,8 @@ enum PropertyType { // Only in lookup results, not in descriptors. HANDLER = 4, INTERCEPTOR = 5, - TRANSITION = 6, // Only used as a marker in LookupResult. - NONEXISTENT = 7 + NONEXISTENT = 6 }; diff --git a/deps/v8/src/property.cc b/deps/v8/src/property.cc index 2f72eec48..e7d0c4e2f 100644 --- a/deps/v8/src/property.cc +++ b/deps/v8/src/property.cc @@ -1,36 +1,14 @@ -// Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. -#include "v8.h" +#include "property.h" + +#include "handles-inl.h" namespace v8 { namespace internal { - void LookupResult::Iterate(ObjectVisitor* visitor) { LookupResult* current = this; // Could be NULL. while (current != NULL) { @@ -51,6 +29,11 @@ void LookupResult::Print(FILE* out) { PrintF(out, "LookupResult:\n"); PrintF(out, " -cacheable = %s\n", IsCacheable() ? 
"true" : "false"); PrintF(out, " -attributes = %x\n", GetAttributes()); + if (IsTransition()) { + PrintF(out, " -transition target:\n"); + GetTransitionTarget()->Print(out); + PrintF(out, "\n"); + } switch (type()) { case NORMAL: PrintF(out, " -type = normal\n"); @@ -64,8 +47,9 @@ void LookupResult::Print(FILE* out) { break; case FIELD: PrintF(out, " -type = field\n"); - PrintF(out, " -index = %d", GetFieldIndex().field_index()); - PrintF(out, "\n"); + PrintF(out, " -index = %d\n", GetFieldIndex().field_index()); + PrintF(out, " -field type:\n"); + GetFieldType()->TypePrint(out); break; case CALLBACKS: PrintF(out, " -type = call backs\n"); @@ -78,29 +62,6 @@ void LookupResult::Print(FILE* out) { case INTERCEPTOR: PrintF(out, " -type = lookup interceptor\n"); break; - case TRANSITION: - switch (GetTransitionDetails().type()) { - case FIELD: - PrintF(out, " -type = map transition\n"); - PrintF(out, " -map:\n"); - GetTransitionTarget()->Print(out); - PrintF(out, "\n"); - return; - case CONSTANT: - PrintF(out, " -type = constant property transition\n"); - PrintF(out, " -map:\n"); - GetTransitionTarget()->Print(out); - PrintF(out, "\n"); - return; - case CALLBACKS: - PrintF(out, " -type = callbacks transition\n"); - PrintF(out, " -callback object:\n"); - GetCallbackObject()->Print(out); - return; - default: - UNREACHABLE(); - return; - } case NONEXISTENT: UNREACHABLE(); break; @@ -114,9 +75,6 @@ void Descriptor::Print(FILE* out) { PrintF(out, " @ "); GetValue()->ShortPrint(out); } - - #endif - } } // namespace v8::internal diff --git a/deps/v8/src/property.h b/deps/v8/src/property.h index baa5a0f99..c7a4e6a63 100644 --- a/deps/v8/src/property.h +++ b/deps/v8/src/property.h @@ -1,60 +1,33 @@ -// Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_PROPERTY_H_ #define V8_PROPERTY_H_ -#include "allocation.h" -#include "transitions.h" +#include "isolate.h" +#include "factory.h" +#include "types.h" namespace v8 { namespace internal { - // Abstraction for elements in instance-descriptor arrays. // // Each descriptor has a key, property attributes, property type, // property index (in the actual instance-descriptor array) and // optionally a piece of data. -// - class Descriptor BASE_EMBEDDED { public: - MUST_USE_RESULT MaybeObject* KeyToUniqueName() { + void KeyToUniqueName() { if (!key_->IsUniqueName()) { - MaybeObject* maybe_result = - key_->GetIsolate()->heap()->InternalizeString(String::cast(key_)); - if (!maybe_result->To(&key_)) return maybe_result; + key_ = key_->GetIsolate()->factory()->InternalizeString( + Handle<String>::cast(key_)); } - return key_; } - Name* GetKey() { return key_; } - Object* GetValue() { return value_; } + Handle<Name> GetKey() { return key_; } + Handle<Object> GetValue() { return value_; } PropertyDetails GetDetails() { return details_; } #ifdef OBJECT_PRINT @@ -64,26 +37,26 @@ class Descriptor BASE_EMBEDDED { void SetSortedKeyIndex(int index) { details_ = details_.set_pointer(index); } private: - Name* key_; - Object* value_; + Handle<Name> key_; + Handle<Object> value_; PropertyDetails details_; protected: Descriptor() : details_(Smi::FromInt(0)) {} - void Init(Name* key, Object* value, PropertyDetails details) { + void Init(Handle<Name> key, Handle<Object> value, PropertyDetails details) { key_ = key; value_ = value; details_ = details; } - Descriptor(Name* key, Object* value, PropertyDetails details) + Descriptor(Handle<Name> key, Handle<Object> value, PropertyDetails details) : key_(key), value_(value), details_(details) { } - Descriptor(Name* key, - Object* value, + Descriptor(Handle<Name> key, + Handle<Object> value, PropertyAttributes attributes, PropertyType type, Representation representation, @@ -93,34 +66,42 @@ class Descriptor BASE_EMBEDDED { details_(attributes, type, representation, field_index) { } friend class DescriptorArray; + friend class Map; }; -class FieldDescriptor: public Descriptor { +class FieldDescriptor V8_FINAL : public Descriptor { public: - FieldDescriptor(Name* key, + FieldDescriptor(Handle<Name> key, int field_index, PropertyAttributes attributes, Representation representation) - : Descriptor(key, Smi::FromInt(0), attributes, + : Descriptor(key, HeapType::Any(key->GetIsolate()), attributes, FIELD, representation, field_index) {} + FieldDescriptor(Handle<Name> key, + int field_index, + Handle<HeapType> field_type, + PropertyAttributes attributes, + Representation representation) + : Descriptor(key, field_type, attributes, FIELD, + representation, field_index) { } }; -class ConstantDescriptor: public Descriptor { +class ConstantDescriptor V8_FINAL : public Descriptor { public: - ConstantDescriptor(Name* key, - Object* value, + ConstantDescriptor(Handle<Name> key, + Handle<Object> value, PropertyAttributes attributes) : Descriptor(key, value, attributes, CONSTANT, value->OptimalRepresentation()) {} }; -class CallbacksDescriptor: public Descriptor { +class CallbacksDescriptor V8_FINAL : public Descriptor { public: - CallbacksDescriptor(Name* key, - Object* foreign, + CallbacksDescriptor(Handle<Name> key, + Handle<Object> foreign, PropertyAttributes attributes) : Descriptor(key, foreign, attributes, CALLBACKS, Representation::Tagged()) {} @@ -129,7 +110,7 @@ class CallbacksDescriptor: public Descriptor { // Holds a property index value distinguishing if it is a field 
index or an // index inside the object header. -class PropertyIndex { +class PropertyIndex V8_FINAL { public: static PropertyIndex NewFieldIndex(int index) { return PropertyIndex(index, false); @@ -177,7 +158,7 @@ class PropertyIndex { }; -class LookupResult BASE_EMBEDDED { +class LookupResult V8_FINAL BASE_EMBEDDED { public: explicit LookupResult(Isolate* isolate) : isolate_(isolate), @@ -205,18 +186,34 @@ class LookupResult BASE_EMBEDDED { number_ = number; } - bool CanHoldValue(Handle<Object> value) { - if (IsNormal()) return true; - ASSERT(!IsTransition()); - return value->FitsRepresentation(details_.representation()); + bool CanHoldValue(Handle<Object> value) const { + switch (type()) { + case NORMAL: + return true; + case FIELD: + return value->FitsRepresentation(representation()) && + GetFieldType()->NowContains(value); + case CONSTANT: + ASSERT(GetConstant() != *value || + value->FitsRepresentation(representation())); + return GetConstant() == *value; + case CALLBACKS: + case HANDLER: + case INTERCEPTOR: + return true; + case NONEXISTENT: + UNREACHABLE(); + } + UNREACHABLE(); + return true; } void TransitionResult(JSObject* holder, Map* target) { lookup_type_ = TRANSITION_TYPE; - details_ = PropertyDetails(NONE, TRANSITION, Representation::None()); + number_ = target->LastAdded(); + details_ = target->instance_descriptors()->GetDetails(number_); holder_ = holder; transition_ = target; - number_ = 0xAAAA; } void DictionaryResult(JSObject* holder, int entry) { @@ -266,20 +263,17 @@ class LookupResult BASE_EMBEDDED { Representation representation() const { ASSERT(IsFound()); - ASSERT(!IsTransition()); ASSERT(details_.type() != NONEXISTENT); return details_.representation(); } PropertyAttributes GetAttributes() const { - ASSERT(!IsTransition()); ASSERT(IsFound()); ASSERT(details_.type() != NONEXISTENT); return details_.attributes(); } PropertyDetails GetPropertyDetails() const { - ASSERT(!IsTransition()); return details_; } @@ -291,38 +285,40 @@ class LookupResult BASE_EMBEDDED { // Property callbacks does not include transitions to callbacks. 
bool IsPropertyCallbacks() const { ASSERT(!(details_.type() == CALLBACKS && !IsFound())); - return details_.type() == CALLBACKS; + return !IsTransition() && details_.type() == CALLBACKS; } bool IsReadOnly() const { ASSERT(IsFound()); - ASSERT(!IsTransition()); ASSERT(details_.type() != NONEXISTENT); return details_.IsReadOnly(); } bool IsField() const { ASSERT(!(details_.type() == FIELD && !IsFound())); - return details_.type() == FIELD; + return IsDescriptorOrDictionary() && type() == FIELD; } bool IsNormal() const { ASSERT(!(details_.type() == NORMAL && !IsFound())); - return details_.type() == NORMAL; + return IsDescriptorOrDictionary() && type() == NORMAL; } bool IsConstant() const { ASSERT(!(details_.type() == CONSTANT && !IsFound())); - return details_.type() == CONSTANT; + return IsDescriptorOrDictionary() && type() == CONSTANT; } bool IsConstantFunction() const { - return IsConstant() && GetValue()->IsJSFunction(); + return IsConstant() && GetConstant()->IsJSFunction(); } bool IsDontDelete() const { return details_.IsDontDelete(); } bool IsDontEnum() const { return details_.IsDontEnum(); } bool IsFound() const { return lookup_type_ != NOT_FOUND; } + bool IsDescriptorOrDictionary() const { + return lookup_type_ == DESCRIPTOR_TYPE || lookup_type_ == DICTIONARY_TYPE; + } bool IsTransition() const { return lookup_type_ == TRANSITION_TYPE; } bool IsHandler() const { return lookup_type_ == HANDLER_TYPE; } bool IsInterceptor() const { return lookup_type_ == INTERCEPTOR_TYPE; } @@ -333,20 +329,31 @@ class LookupResult BASE_EMBEDDED { } bool IsDataProperty() const { - switch (type()) { - case FIELD: - case NORMAL: - case CONSTANT: - return true; - case CALLBACKS: { - Object* callback = GetCallbackObject(); - return callback->IsAccessorInfo() || callback->IsForeign(); - } - case HANDLER: - case INTERCEPTOR: - case TRANSITION: - case NONEXISTENT: + switch (lookup_type_) { + case NOT_FOUND: + case TRANSITION_TYPE: + case HANDLER_TYPE: + case INTERCEPTOR_TYPE: return false; + + case DESCRIPTOR_TYPE: + case DICTIONARY_TYPE: + switch (type()) { + case FIELD: + case NORMAL: + case CONSTANT: + return true; + case CALLBACKS: { + Object* callback = GetCallbackObject(); + ASSERT(!callback->IsForeign()); + return callback->IsAccessorInfo(); + } + case HANDLER: + case INTERCEPTOR: + case NONEXISTENT: + UNREACHABLE(); + return false; + } } UNREACHABLE(); return false; @@ -356,45 +363,52 @@ class LookupResult BASE_EMBEDDED { void DisallowCaching() { cacheable_ = false; } Object* GetLazyValue() const { - switch (type()) { - case FIELD: - return holder()->RawFastPropertyAt(GetFieldIndex().field_index()); - case NORMAL: { - Object* value; - value = holder()->property_dictionary()->ValueAt(GetDictionaryEntry()); - if (holder()->IsGlobalObject()) { - value = PropertyCell::cast(value)->value(); - } - return value; - } - case CONSTANT: - return GetConstant(); - case CALLBACKS: - case HANDLER: - case INTERCEPTOR: - case TRANSITION: - case NONEXISTENT: + switch (lookup_type_) { + case NOT_FOUND: + case TRANSITION_TYPE: + case HANDLER_TYPE: + case INTERCEPTOR_TYPE: return isolate()->heap()->the_hole_value(); + + case DESCRIPTOR_TYPE: + case DICTIONARY_TYPE: + switch (type()) { + case FIELD: + return holder()->RawFastPropertyAt(GetFieldIndex().field_index()); + case NORMAL: { + Object* value = holder()->property_dictionary()->ValueAt( + GetDictionaryEntry()); + if (holder()->IsGlobalObject()) { + value = PropertyCell::cast(value)->value(); + } + return value; + } + case CONSTANT: + return GetConstant(); + case 
CALLBACKS: + return isolate()->heap()->the_hole_value(); + case HANDLER: + case INTERCEPTOR: + case NONEXISTENT: + UNREACHABLE(); + return NULL; + } } UNREACHABLE(); return NULL; } Map* GetTransitionTarget() const { - return transition_; - } - - PropertyDetails GetTransitionDetails() const { ASSERT(IsTransition()); - return transition_->GetLastDescriptorDetails(); + return transition_; } bool IsTransitionToField() const { - return IsTransition() && GetTransitionDetails().type() == FIELD; + return IsTransition() && details_.type() == FIELD; } bool IsTransitionToConstant() const { - return IsTransition() && GetTransitionDetails().type() == CONSTANT; + return IsTransition() && details_.type() == CONSTANT; } int GetDescriptorIndex() const { @@ -403,7 +417,8 @@ class LookupResult BASE_EMBEDDED { } PropertyIndex GetFieldIndex() const { - ASSERT(lookup_type_ == DESCRIPTOR_TYPE); + ASSERT(lookup_type_ == DESCRIPTOR_TYPE || + lookup_type_ == TRANSITION_TYPE); return PropertyIndex::NewFieldIndex(GetFieldIndexFromMap(holder()->map())); } @@ -436,7 +451,8 @@ class LookupResult BASE_EMBEDDED { } Object* GetCallbackObject() const { - ASSERT(type() == CALLBACKS && !IsTransition()); + ASSERT(!IsTransition()); + ASSERT(type() == CALLBACKS); return GetValue(); } @@ -447,6 +463,8 @@ class LookupResult BASE_EMBEDDED { Object* GetValue() const { if (lookup_type_ == DESCRIPTOR_TYPE) { return GetValueFromMap(holder()->map()); + } else if (lookup_type_ == TRANSITION_TYPE) { + return GetValueFromMap(transition_); } // In the dictionary case, the data is held in the value field. ASSERT(lookup_type_ == DICTIONARY_TYPE); @@ -454,17 +472,46 @@ class LookupResult BASE_EMBEDDED { } Object* GetValueFromMap(Map* map) const { - ASSERT(lookup_type_ == DESCRIPTOR_TYPE); + ASSERT(lookup_type_ == DESCRIPTOR_TYPE || + lookup_type_ == TRANSITION_TYPE); ASSERT(number_ < map->NumberOfOwnDescriptors()); return map->instance_descriptors()->GetValue(number_); } int GetFieldIndexFromMap(Map* map) const { - ASSERT(lookup_type_ == DESCRIPTOR_TYPE); + ASSERT(lookup_type_ == DESCRIPTOR_TYPE || + lookup_type_ == TRANSITION_TYPE); ASSERT(number_ < map->NumberOfOwnDescriptors()); return map->instance_descriptors()->GetFieldIndex(number_); } + HeapType* GetFieldType() const { + ASSERT(type() == FIELD); + if (lookup_type_ == DESCRIPTOR_TYPE) { + return GetFieldTypeFromMap(holder()->map()); + } + ASSERT(lookup_type_ == TRANSITION_TYPE); + return GetFieldTypeFromMap(transition_); + } + + HeapType* GetFieldTypeFromMap(Map* map) const { + ASSERT(lookup_type_ == DESCRIPTOR_TYPE || + lookup_type_ == TRANSITION_TYPE); + ASSERT(number_ < map->NumberOfOwnDescriptors()); + return map->instance_descriptors()->GetFieldType(number_); + } + + Map* GetFieldOwner() const { + return GetFieldOwnerFromMap(holder()->map()); + } + + Map* GetFieldOwnerFromMap(Map* map) const { + ASSERT(lookup_type_ == DESCRIPTOR_TYPE || + lookup_type_ == TRANSITION_TYPE); + ASSERT(number_ < map->NumberOfOwnDescriptors()); + return map->FindFieldOwner(number_); + } + void Iterate(ObjectVisitor* visitor); private: @@ -488,7 +535,6 @@ class LookupResult BASE_EMBEDDED { PropertyDetails details_; }; - } } // namespace v8::internal #endif // V8_PROPERTY_H_ diff --git a/deps/v8/src/proxy.js b/deps/v8/src/proxy.js index 06be087c7..99f9dab9f 100644 --- a/deps/v8/src/proxy.js +++ b/deps/v8/src/proxy.js @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. "use strict"; diff --git a/deps/v8/src/qnx-math.h b/deps/v8/src/qnx-math.h index bd8474599..8cf65d208 100644 --- a/deps/v8/src/qnx-math.h +++ b/deps/v8/src/qnx-math.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_QNX_MATH_H_ #define V8_QNX_MATH_H_ diff --git a/deps/v8/src/regexp-macro-assembler-irregexp-inl.h b/deps/v8/src/regexp-macro-assembler-irregexp-inl.h index a767ec008..8fe6a4277 100644 --- a/deps/v8/src/regexp-macro-assembler-irregexp-inl.h +++ b/deps/v8/src/regexp-macro-assembler-irregexp-inl.h @@ -1,29 +1,6 @@ // Copyright 2008-2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // A light-weight assembler for the Irregexp byte code. diff --git a/deps/v8/src/regexp-macro-assembler-irregexp.cc b/deps/v8/src/regexp-macro-assembler-irregexp.cc index 3b9a2f660..368fb3006 100644 --- a/deps/v8/src/regexp-macro-assembler-irregexp.cc +++ b/deps/v8/src/regexp-macro-assembler-irregexp.cc @@ -1,29 +1,6 @@ // Copyright 2008-2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" #include "ast.h" diff --git a/deps/v8/src/regexp-macro-assembler-irregexp.h b/deps/v8/src/regexp-macro-assembler-irregexp.h index f8a412d4f..54afe92e1 100644 --- a/deps/v8/src/regexp-macro-assembler-irregexp.h +++ b/deps/v8/src/regexp-macro-assembler-irregexp.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_REGEXP_MACRO_ASSEMBLER_IRREGEXP_H_ #define V8_REGEXP_MACRO_ASSEMBLER_IRREGEXP_H_ diff --git a/deps/v8/src/regexp-macro-assembler-tracer.cc b/deps/v8/src/regexp-macro-assembler-tracer.cc index 75e439247..c307eaf61 100644 --- a/deps/v8/src/regexp-macro-assembler-tracer.cc +++ b/deps/v8/src/regexp-macro-assembler-tracer.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" #include "ast.h" diff --git a/deps/v8/src/regexp-macro-assembler-tracer.h b/deps/v8/src/regexp-macro-assembler-tracer.h index 852fb8041..ac76cfdb0 100644 --- a/deps/v8/src/regexp-macro-assembler-tracer.h +++ b/deps/v8/src/regexp-macro-assembler-tracer.h @@ -1,29 +1,6 @@ // Copyright 2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_REGEXP_MACRO_ASSEMBLER_TRACER_H_ #define V8_REGEXP_MACRO_ASSEMBLER_TRACER_H_ diff --git a/deps/v8/src/regexp-macro-assembler.cc b/deps/v8/src/regexp-macro-assembler.cc index 7d027f880..a522f97c6 100644 --- a/deps/v8/src/regexp-macro-assembler.cc +++ b/deps/v8/src/regexp-macro-assembler.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" #include "ast.h" diff --git a/deps/v8/src/regexp-macro-assembler.h b/deps/v8/src/regexp-macro-assembler.h index fc3100867..8d9129504 100644 --- a/deps/v8/src/regexp-macro-assembler.h +++ b/deps/v8/src/regexp-macro-assembler.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_REGEXP_MACRO_ASSEMBLER_H_ #define V8_REGEXP_MACRO_ASSEMBLER_H_ diff --git a/deps/v8/src/regexp-stack.cc b/deps/v8/src/regexp-stack.cc index f3af490f1..5e250dd85 100644 --- a/deps/v8/src/regexp-stack.cc +++ b/deps/v8/src/regexp-stack.cc @@ -1,29 +1,6 @@ // Copyright 2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" #include "regexp-stack.h" diff --git a/deps/v8/src/regexp-stack.h b/deps/v8/src/regexp-stack.h index 5684239f9..745782d73 100644 --- a/deps/v8/src/regexp-stack.h +++ b/deps/v8/src/regexp-stack.h @@ -1,29 +1,6 @@ // Copyright 2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_REGEXP_STACK_H_ #define V8_REGEXP_STACK_H_ diff --git a/deps/v8/src/regexp.js b/deps/v8/src/regexp.js index 22b08775b..6a0e2b5d9 100644 --- a/deps/v8/src/regexp.js +++ b/deps/v8/src/regexp.js @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file relies on the fact that the following declaration has been made // in runtime.js: @@ -185,7 +162,6 @@ function RegExpExec(string) { i = 0; } - %_Log('regexp', 'regexp-exec,%0r,%1S,%2i', [this, string, lastIndex]); // matchIndices is either null or the lastMatchInfo array. var matchIndices = %_RegExpExec(this, string, i, lastMatchInfo); @@ -229,7 +205,6 @@ function RegExpTest(string) { this.lastIndex = 0; return false; } - %_Log('regexp', 'regexp-exec,%0r,%1S,%2i', [this, string, lastIndex]); // matchIndices is either null or the lastMatchInfo array. var matchIndices = %_RegExpExec(this, string, i, lastMatchInfo); if (IS_NULL(matchIndices)) { @@ -250,7 +225,6 @@ function RegExpTest(string) { %_StringCharCodeAt(regexp.source, 2) != 63) { // '?' regexp = TrimRegExp(regexp); } - %_Log('regexp', 'regexp-exec,%0r,%1S,%2i', [regexp, string, lastIndex]); // matchIndices is either null or the lastMatchInfo array. 
var matchIndices = %_RegExpExec(regexp, string, 0, lastMatchInfo); if (IS_NULL(matchIndices)) { diff --git a/deps/v8/src/rewriter.cc b/deps/v8/src/rewriter.cc index 2b68ed1c9..27f03dc7a 100644 --- a/deps/v8/src/rewriter.cc +++ b/deps/v8/src/rewriter.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/rewriter.h b/deps/v8/src/rewriter.h index 59914d97f..0423802ba 100644 --- a/deps/v8/src/rewriter.h +++ b/deps/v8/src/rewriter.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_REWRITER_H_ #define V8_REWRITER_H_ diff --git a/deps/v8/src/runtime-profiler.cc b/deps/v8/src/runtime-profiler.cc index 5784e4d5a..69229871e 100644 --- a/deps/v8/src/runtime-profiler.cc +++ b/deps/v8/src/runtime-profiler.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -185,6 +162,16 @@ void RuntimeProfiler::OptimizeNow() { SharedFunctionInfo* shared = function->shared(); Code* shared_code = shared->code(); + List<JSFunction*> functions(4); + frame->GetFunctions(&functions); + for (int i = functions.length(); --i >= 0; ) { + SharedFunctionInfo* shared_function_info = functions[i]->shared(); + int ticks = shared_function_info->profiler_ticks(); + if (ticks < Smi::kMaxValue) { + shared_function_info->set_profiler_ticks(ticks + 1); + } + } + if (shared_code->kind() != Code::FUNCTION) continue; if (function->IsInOptimizationQueue()) continue; diff --git a/deps/v8/src/runtime-profiler.h b/deps/v8/src/runtime-profiler.h index efd9b50eb..450910ca8 100644 --- a/deps/v8/src/runtime-profiler.h +++ b/deps/v8/src/runtime-profiler.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_RUNTIME_PROFILER_H_ #define V8_RUNTIME_PROFILER_H_ diff --git a/deps/v8/src/runtime.cc b/deps/v8/src/runtime.cc index 5142fd33d..b82d377f7 100644 --- a/deps/v8/src/runtime.cc +++ b/deps/v8/src/runtime.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include <stdlib.h> #include <limits> @@ -38,6 +15,7 @@ #include "codegen.h" #include "compilation-cache.h" #include "compiler.h" +#include "conversions.h" #include "cpu.h" #include "cpu-profiler.h" #include "dateparser-inl.h" @@ -63,7 +41,6 @@ #include "string-search.h" #include "stub-cache.h" #include "uri.h" -#include "v8conversions.h" #include "v8threads.h" #include "vm-state-inl.h" @@ -105,6 +82,12 @@ namespace internal { #define RUNTIME_ASSERT(value) \ if (!(value)) return isolate->ThrowIllegalOperation(); +#define RUNTIME_ASSERT_HANDLIFIED(value, T) \ + if (!(value)) { \ + isolate->ThrowIllegalOperation(); \ + return MaybeHandle<T>(); \ + } + // Cast the given object to a value of the specified type and store // it in a variable with the given name. If the object is not of the // expected type call IllegalOperation and return. @@ -116,6 +99,10 @@ namespace internal { RUNTIME_ASSERT(args[index]->Is##Type()); \ Handle<Type> name = args.at<Type>(index); +#define CONVERT_NUMBER_ARG_HANDLE_CHECKED(name, index) \ + RUNTIME_ASSERT(args[index]->IsNumber()); \ + Handle<Object> name = args.at<Object>(index); + // Cast the given object to a boolean and store it in a variable with // the given name. If the object is not a boolean call IllegalOperation // and return. @@ -209,19 +196,17 @@ static Handle<Map> ComputeObjectLiteralMap( return isolate->factory()->ObjectLiteralMapFromCache(context, keys); } *is_result_from_cache = false; - return isolate->factory()->CopyMap( - Handle<Map>(context->object_function()->initial_map()), - number_of_properties); + return Map::Create(handle(context->object_function()), number_of_properties); } -static Handle<Object> CreateLiteralBoilerplate( +MUST_USE_RESULT static MaybeHandle<Object> CreateLiteralBoilerplate( Isolate* isolate, Handle<FixedArray> literals, Handle<FixedArray> constant_properties); -static Handle<Object> CreateObjectLiteralBoilerplate( +MUST_USE_RESULT static MaybeHandle<Object> CreateObjectLiteralBoilerplate( Isolate* isolate, Handle<FixedArray> literals, Handle<FixedArray> constant_properties, @@ -276,27 +261,29 @@ static Handle<Object> CreateObjectLiteralBoilerplate( // The value contains the constant_properties of a // simple object or array literal. Handle<FixedArray> array = Handle<FixedArray>::cast(value); - value = CreateLiteralBoilerplate(isolate, literals, array); - if (value.is_null()) return value; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, value, + CreateLiteralBoilerplate(isolate, literals, array), + Object); } - Handle<Object> result; + MaybeHandle<Object> maybe_result; uint32_t element_index = 0; StoreMode mode = value->IsJSObject() ? FORCE_FIELD : ALLOW_AS_CONSTANT; if (key->IsInternalizedString()) { if (Handle<String>::cast(key)->AsArrayIndex(&element_index)) { // Array index as string (uint32). - result = JSObject::SetOwnElement( + maybe_result = JSObject::SetOwnElement( boilerplate, element_index, value, SLOPPY); } else { Handle<String> name(String::cast(*key)); ASSERT(!name->AsArrayIndex(&element_index)); - result = JSObject::SetLocalPropertyIgnoreAttributes( + maybe_result = JSObject::SetLocalPropertyIgnoreAttributes( boilerplate, name, value, NONE, Object::OPTIMAL_REPRESENTATION, mode); } } else if (key->ToArrayIndex(&element_index)) { // Array index (uint32). - result = JSObject::SetOwnElement( + maybe_result = JSObject::SetOwnElement( boilerplate, element_index, value, SLOPPY); } else { // Non-uint32 number. 
@@ -305,9 +292,8 @@ static Handle<Object> CreateObjectLiteralBoilerplate( char arr[100]; Vector<char> buffer(arr, ARRAY_SIZE(arr)); const char* str = DoubleToCString(num, buffer); - Handle<String> name = - isolate->factory()->NewStringFromAscii(CStrVector(str)); - result = JSObject::SetLocalPropertyIgnoreAttributes( + Handle<String> name = isolate->factory()->NewStringFromAsciiChecked(str); + maybe_result = JSObject::SetLocalPropertyIgnoreAttributes( boilerplate, name, value, NONE, Object::OPTIMAL_REPRESENTATION, mode); } @@ -315,7 +301,7 @@ static Handle<Object> CreateObjectLiteralBoilerplate( // exception, the exception is converted to an empty handle in // the handle based operations. In that case, we need to // convert back to an exception. - if (result.is_null()) return result; + RETURN_ON_EXCEPTION(isolate, maybe_result, Object); } // Transform to fast properties if necessary. For object literals with @@ -331,25 +317,30 @@ static Handle<Object> CreateObjectLiteralBoilerplate( } -MaybeObject* TransitionElements(Handle<Object> object, - ElementsKind to_kind, - Isolate* isolate) { +MUST_USE_RESULT static MaybeHandle<Object> TransitionElements( + Handle<Object> object, + ElementsKind to_kind, + Isolate* isolate) { HandleScope scope(isolate); - if (!object->IsJSObject()) return isolate->ThrowIllegalOperation(); + if (!object->IsJSObject()) { + isolate->ThrowIllegalOperation(); + return MaybeHandle<Object>(); + } ElementsKind from_kind = Handle<JSObject>::cast(object)->map()->elements_kind(); if (Map::IsValidElementsTransition(from_kind, to_kind)) { JSObject::TransitionElementsKind(Handle<JSObject>::cast(object), to_kind); - return *object; + return object; } - return isolate->ThrowIllegalOperation(); + isolate->ThrowIllegalOperation(); + return MaybeHandle<Object>(); } static const int kSmiLiteralMinimumLength = 1024; -Handle<Object> Runtime::CreateArrayLiteralBoilerplate( +MaybeHandle<Object> Runtime::CreateArrayLiteralBoilerplate( Isolate* isolate, Handle<FixedArray> literals, Handle<FixedArray> elements) { @@ -368,14 +359,14 @@ Handle<Object> Runtime::CreateArrayLiteralBoilerplate( Handle<FixedArrayBase> constant_elements_values( FixedArrayBase::cast(elements->get(1))); - ASSERT(IsFastElementsKind(constant_elements_kind)); - Context* native_context = isolate->context()->native_context(); - Object* maybe_maps_array = native_context->js_array_maps(); - ASSERT(!maybe_maps_array->IsUndefined()); - Object* maybe_map = FixedArray::cast(maybe_maps_array)->get( - constant_elements_kind); - ASSERT(maybe_map->IsMap()); - object->set_map(Map::cast(maybe_map)); + { DisallowHeapAllocation no_gc; + ASSERT(IsFastElementsKind(constant_elements_kind)); + Context* native_context = isolate->context()->native_context(); + Object* maps_array = native_context->js_array_maps(); + ASSERT(!maps_array->IsUndefined()); + Object* map = FixedArray::cast(maps_array)->get(constant_elements_kind); + object->set_map(Map::cast(map)); + } Handle<FixedArrayBase> copied_elements_values; if (IsFastDoubleElementsKind(constant_elements_kind)) { @@ -403,14 +394,15 @@ Handle<Object> Runtime::CreateArrayLiteralBoilerplate( isolate->factory()->CopyFixedArray(fixed_array_values); copied_elements_values = fixed_array_values_copy; for (int i = 0; i < fixed_array_values->length(); i++) { - Object* current = fixed_array_values->get(i); - if (current->IsFixedArray()) { + if (fixed_array_values->get(i)->IsFixedArray()) { // The value contains the constant_properties of a // simple object or array literal. 
Handle<FixedArray> fa(FixedArray::cast(fixed_array_values->get(i))); - Handle<Object> result = - CreateLiteralBoilerplate(isolate, literals, fa); - if (result.is_null()) return result; + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + CreateLiteralBoilerplate(isolate, literals, fa), + Object); fixed_array_values_copy->set(i, *result); } } @@ -426,20 +418,19 @@ Handle<Object> Runtime::CreateArrayLiteralBoilerplate( ElementsKind elements_kind = object->GetElementsKind(); if (!IsFastObjectElementsKind(elements_kind)) { if (IsFastHoleyElementsKind(elements_kind)) { - CHECK(!TransitionElements(object, FAST_HOLEY_ELEMENTS, - isolate)->IsFailure()); + TransitionElements(object, FAST_HOLEY_ELEMENTS, isolate).Check(); } else { - CHECK(!TransitionElements(object, FAST_ELEMENTS, isolate)->IsFailure()); + TransitionElements(object, FAST_ELEMENTS, isolate).Check(); } } } - object->ValidateElements(); + JSObject::ValidateElements(object); return object; } -static Handle<Object> CreateLiteralBoilerplate( +MUST_USE_RESULT static MaybeHandle<Object> CreateLiteralBoilerplate( Isolate* isolate, Handle<FixedArray> literals, Handle<FixedArray> array) { @@ -463,12 +454,12 @@ static Handle<Object> CreateLiteralBoilerplate( isolate, literals, elements); default: UNREACHABLE(); - return Handle<Object>::null(); + return MaybeHandle<Object>(); } } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CreateObjectLiteral) { +RUNTIME_FUNCTION(RuntimeHidden_CreateObjectLiteral) { HandleScope scope(isolate); ASSERT(args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 0); @@ -478,24 +469,29 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CreateObjectLiteral) { bool should_have_fast_elements = (flags & ObjectLiteral::kFastElements) != 0; bool has_function_literal = (flags & ObjectLiteral::kHasFunction) != 0; + RUNTIME_ASSERT(literals_index >= 0 && literals_index < literals->length()); + // Check if boilerplate exists. If not, create it first. Handle<Object> literal_site(literals->get(literals_index), isolate); Handle<AllocationSite> site; Handle<JSObject> boilerplate; if (*literal_site == isolate->heap()->undefined_value()) { - Handle<Object> raw_boilerplate = CreateObjectLiteralBoilerplate( - isolate, - literals, - constant_properties, - should_have_fast_elements, - has_function_literal); - RETURN_IF_EMPTY_HANDLE(isolate, raw_boilerplate); + Handle<Object> raw_boilerplate; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, raw_boilerplate, + CreateObjectLiteralBoilerplate( + isolate, + literals, + constant_properties, + should_have_fast_elements, + has_function_literal)); boilerplate = Handle<JSObject>::cast(raw_boilerplate); AllocationSiteCreationContext creation_context(isolate); site = creation_context.EnterNewScope(); - RETURN_IF_EMPTY_HANDLE(isolate, - JSObject::DeepWalk(boilerplate, &creation_context)); + RETURN_FAILURE_ON_EXCEPTION( + isolate, + JSObject::DeepWalk(boilerplate, &creation_context)); creation_context.ExitScope(site, boilerplate); // Update the functions literal and return the boilerplate. 
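Above, the RUNTIME_FUNCTION entry points drop the explicit MaybeObject* return, and the new RUNTIME_ASSERT_HANDLIFIED macro lets handlified helpers reject bad arguments (such as an out-of-range literals_index) by returning an empty MaybeHandle, which the entry point then turns into a failure result. A rough sketch of that two-layer shape, again with std::optional standing in for handles and -1 standing in for the failure object; the function names mirror the diff for readability but the bodies are made up:

#include <cstdio>
#include <optional>
#include <vector>

static const int kFailureSentinel = -1;

// Analogue of RUNTIME_ASSERT_HANDLIFIED: validate an argument inside a
// handlified helper and bail out with an empty result when it is bogus.
#define RUNTIME_ASSERT_HANDLIFIED(cond) \
  do {                                  \
    if (!(cond)) return std::nullopt;   \
  } while (false)

std::optional<int> CreateArrayLiteralImpl(const std::vector<int>& literals,
                                          int literals_index) {
  RUNTIME_ASSERT_HANDLIFIED(
      literals_index >= 0 &&
      literals_index < static_cast<int>(literals.size()));
  return literals[literals_index] * 2;  // stand-in for building a boilerplate
}

// Analogue of ASSIGN_RETURN_FAILURE_ON_EXCEPTION at the entry point: an
// empty helper result becomes the failure value the caller understands.
int Runtime_CreateArrayLiteral(const std::vector<int>& literals, int index) {
  std::optional<int> result = CreateArrayLiteralImpl(literals, index);
  if (!result.has_value()) return kFailureSentinel;
  return *result;
}

int main() {
  std::vector<int> literals = {10, 20, 30};
  std::printf("%d %d\n", Runtime_CreateArrayLiteral(literals, 1),
              Runtime_CreateArrayLiteral(literals, 7));
}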
@@ -508,14 +504,16 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CreateObjectLiteral) { AllocationSiteUsageContext usage_context(isolate, site, true); usage_context.EnterNewScope(); - Handle<Object> copy = JSObject::DeepCopy(boilerplate, &usage_context); + MaybeHandle<Object> maybe_copy = JSObject::DeepCopy( + boilerplate, &usage_context); usage_context.ExitScope(site, boilerplate); - RETURN_IF_EMPTY_HANDLE(isolate, copy); + Handle<Object> copy; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, copy, maybe_copy); return *copy; } -static Handle<AllocationSite> GetLiteralAllocationSite( +MUST_USE_RESULT static MaybeHandle<AllocationSite> GetLiteralAllocationSite( Isolate* isolate, Handle<FixedArray> literals, int literals_index, @@ -525,9 +523,11 @@ static Handle<AllocationSite> GetLiteralAllocationSite( Handle<AllocationSite> site; if (*literal_site == isolate->heap()->undefined_value()) { ASSERT(*elements != isolate->heap()->empty_fixed_array()); - Handle<Object> boilerplate = - Runtime::CreateArrayLiteralBoilerplate(isolate, literals, elements); - if (boilerplate.is_null()) return Handle<AllocationSite>::null(); + Handle<Object> boilerplate; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, boilerplate, + Runtime::CreateArrayLiteralBoilerplate(isolate, literals, elements), + AllocationSite); AllocationSiteCreationContext creation_context(isolate); site = creation_context.EnterNewScope(); @@ -546,14 +546,18 @@ static Handle<AllocationSite> GetLiteralAllocationSite( } -static MaybeObject* CreateArrayLiteralImpl(Isolate* isolate, +static MaybeHandle<JSObject> CreateArrayLiteralImpl(Isolate* isolate, Handle<FixedArray> literals, int literals_index, Handle<FixedArray> elements, int flags) { - Handle<AllocationSite> site = GetLiteralAllocationSite(isolate, literals, - literals_index, elements); - RETURN_IF_EMPTY_HANDLE(isolate, site); + RUNTIME_ASSERT_HANDLIFIED(literals_index >= 0 && + literals_index < literals->length(), JSObject); + Handle<AllocationSite> site; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, site, + GetLiteralAllocationSite(isolate, literals, literals_index, elements), + JSObject); bool enable_mementos = (flags & ArrayLiteral::kDisableMementos) == 0; Handle<JSObject> boilerplate(JSObject::cast(site->transition_info())); @@ -562,15 +566,14 @@ static MaybeObject* CreateArrayLiteralImpl(Isolate* isolate, JSObject::DeepCopyHints hints = (flags & ArrayLiteral::kShallowElements) == 0 ? 
JSObject::kNoHints : JSObject::kObjectIsShallowArray; - Handle<JSObject> copy = JSObject::DeepCopy(boilerplate, &usage_context, - hints); + MaybeHandle<JSObject> copy = JSObject::DeepCopy(boilerplate, &usage_context, + hints); usage_context.ExitScope(site, boilerplate); - RETURN_IF_EMPTY_HANDLE(isolate, copy); - return *copy; + return copy; } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CreateArrayLiteral) { +RUNTIME_FUNCTION(RuntimeHidden_CreateArrayLiteral) { HandleScope scope(isolate); ASSERT(args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 0); @@ -578,76 +581,83 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CreateArrayLiteral) { CONVERT_ARG_HANDLE_CHECKED(FixedArray, elements, 2); CONVERT_SMI_ARG_CHECKED(flags, 3); - return CreateArrayLiteralImpl(isolate, literals, literals_index, elements, - flags); + Handle<JSObject> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result, + CreateArrayLiteralImpl(isolate, literals, literals_index, elements, + flags)); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CreateArrayLiteralStubBailout) { +RUNTIME_FUNCTION(RuntimeHidden_CreateArrayLiteralStubBailout) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 0); CONVERT_SMI_ARG_CHECKED(literals_index, 1); CONVERT_ARG_HANDLE_CHECKED(FixedArray, elements, 2); - return CreateArrayLiteralImpl(isolate, literals, literals_index, elements, - ArrayLiteral::kShallowElements); + Handle<JSObject> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result, + CreateArrayLiteralImpl(isolate, literals, literals_index, elements, + ArrayLiteral::kShallowElements)); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateSymbol) { +RUNTIME_FUNCTION(Runtime_CreateSymbol) { HandleScope scope(isolate); ASSERT(args.length() == 1); - Handle<Object> name(args[0], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, name, 0); RUNTIME_ASSERT(name->IsString() || name->IsUndefined()); - Symbol* symbol; - MaybeObject* maybe = isolate->heap()->AllocateSymbol(); - if (!maybe->To(&symbol)) return maybe; + Handle<Symbol> symbol = isolate->factory()->NewSymbol(); if (name->IsString()) symbol->set_name(*name); - return symbol; + return *symbol; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CreatePrivateSymbol) { +RUNTIME_FUNCTION(Runtime_CreatePrivateSymbol) { HandleScope scope(isolate); ASSERT(args.length() == 1); - Handle<Object> name(args[0], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, name, 0); RUNTIME_ASSERT(name->IsString() || name->IsUndefined()); - Symbol* symbol; - MaybeObject* maybe = isolate->heap()->AllocatePrivateSymbol(); - if (!maybe->To(&symbol)) return maybe; + Handle<Symbol> symbol = isolate->factory()->NewPrivateSymbol(); if (name->IsString()) symbol->set_name(*name); - return symbol; + return *symbol; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateGlobalPrivateSymbol) { +RUNTIME_FUNCTION(Runtime_CreateGlobalPrivateSymbol) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(String, name, 0); Handle<JSObject> registry = isolate->GetSymbolRegistry(); Handle<String> part = isolate->factory()->private_intern_string(); - Handle<JSObject> privates = - Handle<JSObject>::cast(JSObject::GetProperty(registry, part)); - Handle<Object> symbol = JSObject::GetProperty(privates, name); + Handle<Object> privates; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, privates, Object::GetPropertyOrElement(registry, part)); + Handle<Object> symbol; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, 
symbol, Object::GetPropertyOrElement(privates, name)); if (!symbol->IsSymbol()) { ASSERT(symbol->IsUndefined()); symbol = isolate->factory()->NewPrivateSymbol(); Handle<Symbol>::cast(symbol)->set_name(*name); - JSObject::SetProperty(privates, name, symbol, NONE, STRICT); + JSObject::SetProperty(Handle<JSObject>::cast(privates), + name, symbol, NONE, STRICT).Assert(); } return *symbol; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NewSymbolWrapper) { +RUNTIME_FUNCTION(Runtime_NewSymbolWrapper) { + HandleScope scope(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(Symbol, symbol, 0); - return symbol->ToObject(isolate); + CONVERT_ARG_HANDLE_CHECKED(Symbol, symbol, 0); + return *Object::ToObject(isolate, symbol).ToHandleChecked(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SymbolDescription) { +RUNTIME_FUNCTION(Runtime_SymbolDescription) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(Symbol, symbol, 0); @@ -655,14 +665,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SymbolDescription) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SymbolRegistry) { +RUNTIME_FUNCTION(Runtime_SymbolRegistry) { HandleScope scope(isolate); ASSERT(args.length() == 0); return *isolate->GetSymbolRegistry(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SymbolIsPrivate) { +RUNTIME_FUNCTION(Runtime_SymbolIsPrivate) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(Symbol, symbol, 0); @@ -670,49 +680,47 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SymbolIsPrivate) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateJSProxy) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_CreateJSProxy) { + HandleScope scope(isolate); ASSERT(args.length() == 2); - CONVERT_ARG_CHECKED(JSReceiver, handler, 0); - Object* prototype = args[1]; - Object* used_prototype = - prototype->IsJSReceiver() ? prototype : isolate->heap()->null_value(); - return isolate->heap()->AllocateJSProxy(handler, used_prototype); + CONVERT_ARG_HANDLE_CHECKED(JSReceiver, handler, 0); + CONVERT_ARG_HANDLE_CHECKED(Object, prototype, 1); + if (!prototype->IsJSReceiver()) prototype = isolate->factory()->null_value(); + return *isolate->factory()->NewJSProxy(handler, prototype); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateJSFunctionProxy) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_CreateJSFunctionProxy) { + HandleScope scope(isolate); ASSERT(args.length() == 4); - CONVERT_ARG_CHECKED(JSReceiver, handler, 0); - Object* call_trap = args[1]; + CONVERT_ARG_HANDLE_CHECKED(JSReceiver, handler, 0); + CONVERT_ARG_HANDLE_CHECKED(Object, call_trap, 1); RUNTIME_ASSERT(call_trap->IsJSFunction() || call_trap->IsJSFunctionProxy()); - CONVERT_ARG_CHECKED(JSFunction, construct_trap, 2); - Object* prototype = args[3]; - Object* used_prototype = - prototype->IsJSReceiver() ? 
prototype : isolate->heap()->null_value(); - return isolate->heap()->AllocateJSFunctionProxy( - handler, call_trap, construct_trap, used_prototype); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, construct_trap, 2); + CONVERT_ARG_HANDLE_CHECKED(Object, prototype, 3); + if (!prototype->IsJSReceiver()) prototype = isolate->factory()->null_value(); + return *isolate->factory()->NewJSFunctionProxy( + handler, call_trap, construct_trap, prototype); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsJSProxy) { +RUNTIME_FUNCTION(Runtime_IsJSProxy) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); - Object* obj = args[0]; + CONVERT_ARG_HANDLE_CHECKED(Object, obj, 0); return isolate->heap()->ToBoolean(obj->IsJSProxy()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsJSFunctionProxy) { +RUNTIME_FUNCTION(Runtime_IsJSFunctionProxy) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); - Object* obj = args[0]; + CONVERT_ARG_HANDLE_CHECKED(Object, obj, 0); return isolate->heap()->ToBoolean(obj->IsJSFunctionProxy()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetHandler) { +RUNTIME_FUNCTION(Runtime_GetHandler) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSProxy, proxy, 0); @@ -720,7 +728,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetHandler) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetCallTrap) { +RUNTIME_FUNCTION(Runtime_GetCallTrap) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSFunctionProxy, proxy, 0); @@ -728,7 +736,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetCallTrap) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetConstructTrap) { +RUNTIME_FUNCTION(Runtime_GetConstructTrap) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSFunctionProxy, proxy, 0); @@ -736,7 +744,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetConstructTrap) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_Fix) { +RUNTIME_FUNCTION(Runtime_Fix) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSProxy, proxy, 0); @@ -802,7 +810,7 @@ bool Runtime::SetupArrayBufferAllocatingData( data = V8::ArrayBufferAllocator()->Allocate(allocated_length); } else { data = - V8::ArrayBufferAllocator()->AllocateUninitialized(allocated_length); + V8::ArrayBufferAllocator()->AllocateUninitialized(allocated_length); } if (data == NULL) return false; } else { @@ -835,41 +843,32 @@ void Runtime::NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferInitialize) { +RUNTIME_FUNCTION(Runtime_ArrayBufferInitialize) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSArrayBuffer, holder, 0); - CONVERT_ARG_HANDLE_CHECKED(Object, byteLength, 1); - size_t allocated_length; - if (byteLength->IsSmi()) { - allocated_length = Smi::cast(*byteLength)->value(); - } else { - ASSERT(byteLength->IsHeapNumber()); - double value = HeapNumber::cast(*byteLength)->value(); - - ASSERT(value >= 0); - - if (value > std::numeric_limits<size_t>::max()) { - return isolate->Throw( - *isolate->factory()->NewRangeError("invalid_array_buffer_length", - HandleVector<Object>(NULL, 0))); - } - - allocated_length = static_cast<size_t>(value); + CONVERT_NUMBER_ARG_HANDLE_CHECKED(byteLength, 1); + if (!holder->byte_length()->IsUndefined()) { + // ArrayBuffer is already initialized; probably a fuzz test. 
+ return *holder; + } + size_t allocated_length = 0; + if (!TryNumberToSize(isolate, *byteLength, &allocated_length)) { + return isolate->Throw( + *isolate->factory()->NewRangeError("invalid_array_buffer_length", + HandleVector<Object>(NULL, 0))); } - if (!Runtime::SetupArrayBufferAllocatingData(isolate, holder, allocated_length)) { - return isolate->Throw(*isolate->factory()-> - NewRangeError("invalid_array_buffer_length", - HandleVector<Object>(NULL, 0))); + return isolate->Throw( + *isolate->factory()->NewRangeError("invalid_array_buffer_length", + HandleVector<Object>(NULL, 0))); } - return *holder; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferGetByteLength) { +RUNTIME_FUNCTION(Runtime_ArrayBufferGetByteLength) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSArrayBuffer, holder, 0); @@ -877,20 +876,21 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferGetByteLength) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferSliceImpl) { +RUNTIME_FUNCTION(Runtime_ArrayBufferSliceImpl) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(JSArrayBuffer, source, 0); CONVERT_ARG_HANDLE_CHECKED(JSArrayBuffer, target, 1); - CONVERT_DOUBLE_ARG_CHECKED(first, 2); - size_t start = static_cast<size_t>(first); + CONVERT_NUMBER_ARG_HANDLE_CHECKED(first, 2); + size_t start = 0; + RUNTIME_ASSERT(TryNumberToSize(isolate, *first, &start)); size_t target_length = NumberToSize(isolate, target->byte_length()); if (target_length == 0) return isolate->heap()->undefined_value(); size_t source_byte_length = NumberToSize(isolate, source->byte_length()); - CHECK(start <= source_byte_length); - CHECK(source_byte_length - start >= target_length); + RUNTIME_ASSERT(start <= source_byte_length); + RUNTIME_ASSERT(source_byte_length - start >= target_length); uint8_t* source_data = reinterpret_cast<uint8_t*>(source->backing_store()); uint8_t* target_data = reinterpret_cast<uint8_t*>(target->backing_store()); CopyBytes(target_data, source_data + start, target_length); @@ -898,18 +898,17 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferSliceImpl) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferIsView) { +RUNTIME_FUNCTION(Runtime_ArrayBufferIsView) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(Object, object, 0); - return object->IsJSArrayBufferView() - ? 
isolate->heap()->true_value() - : isolate->heap()->false_value(); + return isolate->heap()->ToBoolean(object->IsJSArrayBufferView()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferNeuter) { +RUNTIME_FUNCTION(Runtime_ArrayBufferNeuter) { HandleScope scope(isolate); + ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSArrayBuffer, array_buffer, 0); if (array_buffer->backing_store() == NULL) { CHECK(Smi::FromInt(0) == array_buffer->byte_length()); @@ -949,14 +948,18 @@ void Runtime::ArrayIdToTypeAndSize( } -RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) { +RUNTIME_FUNCTION(Runtime_TypedArrayInitialize) { HandleScope scope(isolate); ASSERT(args.length() == 5); CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, holder, 0); CONVERT_SMI_ARG_CHECKED(arrayId, 1); CONVERT_ARG_HANDLE_CHECKED(Object, maybe_buffer, 2); - CONVERT_ARG_HANDLE_CHECKED(Object, byte_offset_object, 3); - CONVERT_ARG_HANDLE_CHECKED(Object, byte_length_object, 4); + CONVERT_NUMBER_ARG_HANDLE_CHECKED(byte_offset_object, 3); + CONVERT_NUMBER_ARG_HANDLE_CHECKED(byte_length_object, 4); + + RUNTIME_ASSERT(arrayId >= Runtime::ARRAY_ID_FIRST && + arrayId <= Runtime::ARRAY_ID_LAST); + RUNTIME_ASSERT(maybe_buffer->IsNull() || maybe_buffer->IsJSArrayBuffer()); ASSERT(holder->GetInternalFieldCount() == v8::ArrayBufferView::kInternalFieldCount); @@ -975,19 +978,21 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) { &fixed_elements_kind, &element_size); + size_t byte_offset = 0; + size_t byte_length = 0; + RUNTIME_ASSERT(TryNumberToSize(isolate, *byte_offset_object, &byte_offset)); + RUNTIME_ASSERT(TryNumberToSize(isolate, *byte_length_object, &byte_length)); + holder->set_byte_offset(*byte_offset_object); holder->set_byte_length(*byte_length_object); - size_t byte_offset = NumberToSize(isolate, *byte_offset_object); - size_t byte_length = NumberToSize(isolate, *byte_length_object); - CHECK_EQ(0, static_cast<int>(byte_length % element_size)); size_t length = byte_length / element_size; if (length > static_cast<unsigned>(Smi::kMaxValue)) { - return isolate->Throw(*isolate->factory()-> - NewRangeError("invalid_typed_array_length", - HandleVector<Object>(NULL, 0))); + return isolate->Throw( + *isolate->factory()->NewRangeError("invalid_typed_array_length", + HandleVector<Object>(NULL, 0))); } Handle<Object> length_obj = isolate->factory()->NewNumberFromSize(length); @@ -997,8 +1002,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) { size_t array_buffer_byte_length = NumberToSize(isolate, buffer->byte_length()); - CHECK(byte_offset <= array_buffer_byte_length); - CHECK(array_buffer_byte_length - byte_offset >= byte_length); + RUNTIME_ASSERT(byte_offset <= array_buffer_byte_length); + RUNTIME_ASSERT(array_buffer_byte_length - byte_offset >= byte_length); holder->set_buffer(*buffer); holder->set_weak_next(buffer->weak_first_view()); @@ -1010,7 +1015,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) { static_cast<uint8_t*>(buffer->backing_store()) + byte_offset); Handle<Map> map = JSObject::GetElementsTransitionMap(holder, external_elements_kind); - holder->set_map_and_elements(*map, *elements); + JSObject::SetMapAndElements(holder, map, elements); ASSERT(IsExternalArrayElementsKind(holder->map()->elements_kind())); } else { holder->set_buffer(Smi::FromInt(0)); @@ -1029,13 +1034,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) { // initializes backing store using memove. // // Returns true if backing store was initialized or false otherwise. 
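The ArrayBuffer and TypedArray hunks in this region replace CHECK(...) aborts with RUNTIME_ASSERT guards and route untrusted lengths and offsets through TryNumberToSize before using them. A self-contained sketch of the same overflow-safe bounds check on plain byte vectors (CopySlice is an invented name, not the V8 routine):

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Compare against (source length - start) instead of computing
// start + length, so an attacker-chosen length cannot wrap around.
bool CopySlice(const std::vector<uint8_t>& source, size_t start,
               std::vector<uint8_t>* target) {
  size_t source_byte_length = source.size();
  size_t target_length = target->size();
  if (start > source_byte_length) return false;
  if (source_byte_length - start < target_length) return false;
  std::memcpy(target->data(), source.data() + start, target_length);
  return true;
}

int main() {
  std::vector<uint8_t> src = {1, 2, 3, 4, 5, 6};
  std::vector<uint8_t> dst(3);
  std::printf("ok=%d first=%d\n", CopySlice(src, 2, &dst), dst[0]);
  // A wrapping start value is rejected instead of crashing the process.
  std::printf("rejected=%d\n", !CopySlice(src, SIZE_MAX, &dst));
}

Phrasing the check as a subtraction keeps every intermediate value in range, which is why the diff writes it as source_byte_length - start >= target_length rather than start + target_length <= source_byte_length.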
-RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitializeFromArrayLike) { +RUNTIME_FUNCTION(Runtime_TypedArrayInitializeFromArrayLike) { HandleScope scope(isolate); ASSERT(args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, holder, 0); CONVERT_SMI_ARG_CHECKED(arrayId, 1); CONVERT_ARG_HANDLE_CHECKED(Object, source, 2); - CONVERT_ARG_HANDLE_CHECKED(Object, length_obj, 3); + CONVERT_NUMBER_ARG_HANDLE_CHECKED(length_obj, 3); + + RUNTIME_ASSERT(arrayId >= Runtime::ARRAY_ID_FIRST && + arrayId <= Runtime::ARRAY_ID_LAST); ASSERT(holder->GetInternalFieldCount() == v8::ArrayBufferView::kInternalFieldCount); @@ -1059,7 +1067,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitializeFromArrayLike) { JSTypedArray::cast(*source)->type() == array_type) { length_obj = Handle<Object>(JSTypedArray::cast(*source)->length(), isolate); } - size_t length = NumberToSize(isolate, *length_obj); + size_t length = 0; + RUNTIME_ASSERT(TryNumberToSize(isolate, *length_obj, &length)); if ((length > static_cast<unsigned>(Smi::kMaxValue)) || (length > (kMaxInt / element_size))) { @@ -1107,7 +1116,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitializeFromArrayLike) { static_cast<uint8_t*>(buffer->backing_store())); Handle<Map> map = JSObject::GetElementsTransitionMap( holder, external_elements_kind); - holder->set_map_and_elements(*map, *elements); + JSObject::SetMapAndElements(holder, map, elements); if (source->IsJSTypedArray()) { Handle<JSTypedArray> typed_array(JSTypedArray::cast(*source)); @@ -1122,43 +1131,34 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitializeFromArrayLike) { buffer->backing_store(), backing_store + source_byte_offset, byte_length); - return *isolate->factory()->true_value(); - } else { - return *isolate->factory()->false_value(); + return isolate->heap()->true_value(); } } - return *isolate->factory()->false_value(); + return isolate->heap()->false_value(); } -#define TYPED_ARRAY_GETTER(getter, accessor) \ - RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayGet##getter) { \ +#define BUFFER_VIEW_GETTER(Type, getter, accessor) \ + RUNTIME_FUNCTION(Runtime_##Type##Get##getter) { \ HandleScope scope(isolate); \ ASSERT(args.length() == 1); \ - CONVERT_ARG_HANDLE_CHECKED(Object, holder, 0); \ - if (!holder->IsJSTypedArray()) \ - return isolate->Throw(*isolate->factory()->NewTypeError( \ - "not_typed_array", HandleVector<Object>(NULL, 0))); \ - Handle<JSTypedArray> typed_array(JSTypedArray::cast(*holder)); \ - return typed_array->accessor(); \ + CONVERT_ARG_HANDLE_CHECKED(JS##Type, holder, 0); \ + return holder->accessor(); \ } -TYPED_ARRAY_GETTER(ByteLength, byte_length) -TYPED_ARRAY_GETTER(ByteOffset, byte_offset) -TYPED_ARRAY_GETTER(Length, length) +BUFFER_VIEW_GETTER(ArrayBufferView, ByteLength, byte_length) +BUFFER_VIEW_GETTER(ArrayBufferView, ByteOffset, byte_offset) +BUFFER_VIEW_GETTER(TypedArray, Length, length) +BUFFER_VIEW_GETTER(DataView, Buffer, buffer) -#undef TYPED_ARRAY_GETTER +#undef BUFFER_VIEW_GETTER -RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayGetBuffer) { +RUNTIME_FUNCTION(Runtime_TypedArrayGetBuffer) { HandleScope scope(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_HANDLE_CHECKED(Object, holder, 0); - if (!holder->IsJSTypedArray()) - return isolate->Throw(*isolate->factory()->NewTypeError( - "not_typed_array", HandleVector<Object>(NULL, 0))); - Handle<JSTypedArray> typed_array(JSTypedArray::cast(*holder)); - return *typed_array->GetBuffer(); + CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, holder, 0); + return *holder->GetBuffer(); } @@ 
-1177,22 +1177,24 @@ enum TypedArraySetResultCodes { }; -RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArraySetFastCases) { +RUNTIME_FUNCTION(Runtime_TypedArraySetFastCases) { HandleScope scope(isolate); - CONVERT_ARG_HANDLE_CHECKED(Object, target_obj, 0); - CONVERT_ARG_HANDLE_CHECKED(Object, source_obj, 1); - CONVERT_ARG_HANDLE_CHECKED(Object, offset_obj, 2); - - if (!target_obj->IsJSTypedArray()) + ASSERT(args.length() == 3); + if (!args[0]->IsJSTypedArray()) return isolate->Throw(*isolate->factory()->NewTypeError( "not_typed_array", HandleVector<Object>(NULL, 0))); - if (!source_obj->IsJSTypedArray()) + if (!args[1]->IsJSTypedArray()) return Smi::FromInt(TYPED_ARRAY_SET_NON_TYPED_ARRAY); + CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, target_obj, 0); + CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, source_obj, 1); + CONVERT_NUMBER_ARG_HANDLE_CHECKED(offset_obj, 2); + Handle<JSTypedArray> target(JSTypedArray::cast(*target_obj)); Handle<JSTypedArray> source(JSTypedArray::cast(*source_obj)); - size_t offset = NumberToSize(isolate, *offset_obj); + size_t offset = 0; + RUNTIME_ASSERT(TryNumberToSize(isolate, *offset_obj, &offset)); size_t target_length = NumberToSize(isolate, target->length()); size_t source_length = NumberToSize(isolate, source->length()); size_t target_byte_length = NumberToSize(isolate, target->byte_length()); @@ -1235,34 +1237,44 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArraySetFastCases) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayMaxSizeInHeap) { - ASSERT_OBJECT_SIZE(FLAG_typed_array_max_size_in_heap); +RUNTIME_FUNCTION(Runtime_TypedArrayMaxSizeInHeap) { + ASSERT(args.length() == 0); + ASSERT_OBJECT_SIZE( + FLAG_typed_array_max_size_in_heap + FixedTypedArrayBase::kDataOffset); return Smi::FromInt(FLAG_typed_array_max_size_in_heap); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DataViewInitialize) { +RUNTIME_FUNCTION(Runtime_DataViewInitialize) { HandleScope scope(isolate); ASSERT(args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(JSDataView, holder, 0); CONVERT_ARG_HANDLE_CHECKED(JSArrayBuffer, buffer, 1); - CONVERT_ARG_HANDLE_CHECKED(Object, byte_offset, 2); - CONVERT_ARG_HANDLE_CHECKED(Object, byte_length, 3); + CONVERT_NUMBER_ARG_HANDLE_CHECKED(byte_offset, 2); + CONVERT_NUMBER_ARG_HANDLE_CHECKED(byte_length, 3); ASSERT(holder->GetInternalFieldCount() == v8::ArrayBufferView::kInternalFieldCount); for (int i = 0; i < v8::ArrayBufferView::kInternalFieldCount; i++) { holder->SetInternalField(i, Smi::FromInt(0)); } + size_t buffer_length = 0; + size_t offset = 0; + size_t length = 0; + RUNTIME_ASSERT( + TryNumberToSize(isolate, buffer->byte_length(), &buffer_length)); + RUNTIME_ASSERT(TryNumberToSize(isolate, *byte_offset, &offset)); + RUNTIME_ASSERT(TryNumberToSize(isolate, *byte_length, &length)); + + // TODO(jkummerow): When we have a "safe numerics" helper class, use it here. + // Entire range [offset, offset + length] must be in bounds. + RUNTIME_ASSERT(offset <= buffer_length); + RUNTIME_ASSERT(offset + length <= buffer_length); + // No overflow. 
+ RUNTIME_ASSERT(offset + length >= offset); holder->set_buffer(*buffer); - ASSERT(byte_offset->IsNumber()); - ASSERT( - NumberToSize(isolate, buffer->byte_length()) >= - NumberToSize(isolate, *byte_offset) - + NumberToSize(isolate, *byte_length)); holder->set_byte_offset(*byte_offset); - ASSERT(byte_length->IsNumber()); holder->set_byte_length(*byte_length); holder->set_weak_next(buffer->weak_first_view()); @@ -1272,30 +1284,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DataViewInitialize) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DataViewGetBuffer) { - HandleScope scope(isolate); - ASSERT(args.length() == 1); - CONVERT_ARG_HANDLE_CHECKED(JSDataView, data_view, 0); - return data_view->buffer(); -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_DataViewGetByteOffset) { - HandleScope scope(isolate); - ASSERT(args.length() == 1); - CONVERT_ARG_HANDLE_CHECKED(JSDataView, data_view, 0); - return data_view->byte_offset(); -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_DataViewGetByteLength) { - HandleScope scope(isolate); - ASSERT(args.length() == 1); - CONVERT_ARG_HANDLE_CHECKED(JSDataView, data_view, 0); - return data_view->byte_length(); -} - - inline static bool NeedToFlipBytes(bool is_little_endian) { #ifdef V8_TARGET_LITTLE_ENDIAN return !is_little_endian; @@ -1411,7 +1399,7 @@ static bool DataViewSetValue( #define DATA_VIEW_GETTER(TypeName, Type, Converter) \ - RUNTIME_FUNCTION(MaybeObject*, Runtime_DataViewGet##TypeName) { \ + RUNTIME_FUNCTION(Runtime_DataViewGet##TypeName) { \ HandleScope scope(isolate); \ ASSERT(args.length() == 3); \ CONVERT_ARG_HANDLE_CHECKED(JSDataView, holder, 0); \ @@ -1420,7 +1408,7 @@ static bool DataViewSetValue( Type result; \ if (DataViewGetValue( \ isolate, holder, offset, is_little_endian, &result)) { \ - return isolate->heap()->Converter(result); \ + return *isolate->factory()->Converter(result); \ } else { \ return isolate->Throw(*isolate->factory()->NewRangeError( \ "invalid_data_view_accessor_offset", \ @@ -1428,14 +1416,14 @@ static bool DataViewSetValue( } \ } -DATA_VIEW_GETTER(Uint8, uint8_t, NumberFromUint32) -DATA_VIEW_GETTER(Int8, int8_t, NumberFromInt32) -DATA_VIEW_GETTER(Uint16, uint16_t, NumberFromUint32) -DATA_VIEW_GETTER(Int16, int16_t, NumberFromInt32) -DATA_VIEW_GETTER(Uint32, uint32_t, NumberFromUint32) -DATA_VIEW_GETTER(Int32, int32_t, NumberFromInt32) -DATA_VIEW_GETTER(Float32, float, NumberFromDouble) -DATA_VIEW_GETTER(Float64, double, NumberFromDouble) +DATA_VIEW_GETTER(Uint8, uint8_t, NewNumberFromUint) +DATA_VIEW_GETTER(Int8, int8_t, NewNumberFromInt) +DATA_VIEW_GETTER(Uint16, uint16_t, NewNumberFromUint) +DATA_VIEW_GETTER(Int16, int16_t, NewNumberFromInt) +DATA_VIEW_GETTER(Uint32, uint32_t, NewNumberFromUint) +DATA_VIEW_GETTER(Int32, int32_t, NewNumberFromInt) +DATA_VIEW_GETTER(Float32, float, NewNumber) +DATA_VIEW_GETTER(Float64, double, NewNumber) #undef DATA_VIEW_GETTER @@ -1493,7 +1481,7 @@ double DataViewConvertValue<double>(double value) { #define DATA_VIEW_SETTER(TypeName, Type) \ - RUNTIME_FUNCTION(MaybeObject*, Runtime_DataViewSet##TypeName) { \ + RUNTIME_FUNCTION(Runtime_DataViewSet##TypeName) { \ HandleScope scope(isolate); \ ASSERT(args.length() == 4); \ CONVERT_ARG_HANDLE_CHECKED(JSDataView, holder, 0); \ @@ -1523,177 +1511,259 @@ DATA_VIEW_SETTER(Float64, double) #undef DATA_VIEW_SETTER -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetInitialize) { +RUNTIME_FUNCTION(Runtime_SetInitialize) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0); - Handle<ObjectHashSet> table = 
isolate->factory()->NewObjectHashSet(0); + Handle<OrderedHashSet> table = isolate->factory()->NewOrderedHashSet(); holder->set_table(*table); return *holder; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAdd) { +RUNTIME_FUNCTION(Runtime_SetAdd) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0); - Handle<Object> key(args[1], isolate); - Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table())); - table = ObjectHashSet::Add(table, key); + CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); + Handle<OrderedHashSet> table(OrderedHashSet::cast(holder->table())); + table = OrderedHashSet::Add(table, key); holder->set_table(*table); return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetHas) { +RUNTIME_FUNCTION(Runtime_SetHas) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0); - Handle<Object> key(args[1], isolate); - Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table())); - return isolate->heap()->ToBoolean(table->Contains(*key)); + CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); + Handle<OrderedHashSet> table(OrderedHashSet::cast(holder->table())); + return isolate->heap()->ToBoolean(table->Contains(key)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetDelete) { +RUNTIME_FUNCTION(Runtime_SetDelete) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0); - Handle<Object> key(args[1], isolate); - Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table())); - table = ObjectHashSet::Remove(table, key); + CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); + Handle<OrderedHashSet> table(OrderedHashSet::cast(holder->table())); + table = OrderedHashSet::Remove(table, key); holder->set_table(*table); return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetGetSize) { +RUNTIME_FUNCTION(Runtime_SetClear) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0); - Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table())); + Handle<OrderedHashSet> table(OrderedHashSet::cast(holder->table())); + table = OrderedHashSet::Clear(table); + holder->set_table(*table); + return isolate->heap()->undefined_value(); +} + + +RUNTIME_FUNCTION(Runtime_SetGetSize) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0); + Handle<OrderedHashSet> table(OrderedHashSet::cast(holder->table())); return Smi::FromInt(table->NumberOfElements()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_MapInitialize) { +RUNTIME_FUNCTION(Runtime_SetCreateIterator) { + HandleScope scope(isolate); + ASSERT(args.length() == 2); + CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0); + CONVERT_SMI_ARG_CHECKED(kind, 1) + RUNTIME_ASSERT(kind == JSSetIterator::kKindValues || + kind == JSSetIterator::kKindEntries); + Handle<OrderedHashSet> table(OrderedHashSet::cast(holder->table())); + return *JSSetIterator::Create(table, kind); +} + + +RUNTIME_FUNCTION(Runtime_SetIteratorNext) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(JSSetIterator, holder, 0); + return *JSSetIterator::Next(holder); +} + + +RUNTIME_FUNCTION(Runtime_SetIteratorClose) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(JSSetIterator, holder, 0); + holder->Close(); + return isolate->heap()->undefined_value(); +} + + +RUNTIME_FUNCTION(Runtime_MapInitialize) { HandleScope scope(isolate); 
ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0); - Handle<ObjectHashTable> table = isolate->factory()->NewObjectHashTable(0); + Handle<OrderedHashMap> table = isolate->factory()->NewOrderedHashMap(); holder->set_table(*table); return *holder; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_MapGet) { +RUNTIME_FUNCTION(Runtime_MapGet) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0); CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); - Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table())); - Handle<Object> lookup(table->Lookup(*key), isolate); + Handle<OrderedHashMap> table(OrderedHashMap::cast(holder->table())); + Handle<Object> lookup(table->Lookup(key), isolate); return lookup->IsTheHole() ? isolate->heap()->undefined_value() : *lookup; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_MapHas) { +RUNTIME_FUNCTION(Runtime_MapHas) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0); CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); - Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table())); - Handle<Object> lookup(table->Lookup(*key), isolate); + Handle<OrderedHashMap> table(OrderedHashMap::cast(holder->table())); + Handle<Object> lookup(table->Lookup(key), isolate); return isolate->heap()->ToBoolean(!lookup->IsTheHole()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_MapDelete) { +RUNTIME_FUNCTION(Runtime_MapDelete) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0); CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); - Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table())); - Handle<Object> lookup(table->Lookup(*key), isolate); - Handle<ObjectHashTable> new_table = - ObjectHashTable::Put(table, key, isolate->factory()->the_hole_value()); + Handle<OrderedHashMap> table(OrderedHashMap::cast(holder->table())); + Handle<Object> lookup(table->Lookup(key), isolate); + Handle<OrderedHashMap> new_table = + OrderedHashMap::Put(table, key, isolate->factory()->the_hole_value()); holder->set_table(*new_table); return isolate->heap()->ToBoolean(!lookup->IsTheHole()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_MapSet) { +RUNTIME_FUNCTION(Runtime_MapClear) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0); + Handle<OrderedHashMap> table(OrderedHashMap::cast(holder->table())); + table = OrderedHashMap::Clear(table); + holder->set_table(*table); + return isolate->heap()->undefined_value(); +} + + +RUNTIME_FUNCTION(Runtime_MapSet) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0); CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); CONVERT_ARG_HANDLE_CHECKED(Object, value, 2); - Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table())); - Handle<ObjectHashTable> new_table = ObjectHashTable::Put(table, key, value); + Handle<OrderedHashMap> table(OrderedHashMap::cast(holder->table())); + Handle<OrderedHashMap> new_table = OrderedHashMap::Put(table, key, value); holder->set_table(*new_table); return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_MapGetSize) { +RUNTIME_FUNCTION(Runtime_MapGetSize) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0); - Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table())); + Handle<OrderedHashMap> table(OrderedHashMap::cast(holder->table())); return 
Smi::FromInt(table->NumberOfElements()); } -static JSWeakCollection* WeakCollectionInitialize(Isolate* isolate, +RUNTIME_FUNCTION(Runtime_MapCreateIterator) { + HandleScope scope(isolate); + ASSERT(args.length() == 2); + CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0); + CONVERT_SMI_ARG_CHECKED(kind, 1) + RUNTIME_ASSERT(kind == JSMapIterator::kKindKeys + || kind == JSMapIterator::kKindValues + || kind == JSMapIterator::kKindEntries); + Handle<OrderedHashMap> table(OrderedHashMap::cast(holder->table())); + return *JSMapIterator::Create(table, kind); +} + + +RUNTIME_FUNCTION(Runtime_MapIteratorNext) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(JSMapIterator, holder, 0); + return *JSMapIterator::Next(holder); +} + + +RUNTIME_FUNCTION(Runtime_MapIteratorClose) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(JSMapIterator, holder, 0); + holder->Close(); + return isolate->heap()->undefined_value(); +} + + +static Handle<JSWeakCollection> WeakCollectionInitialize( + Isolate* isolate, Handle<JSWeakCollection> weak_collection) { ASSERT(weak_collection->map()->inobject_properties() == 0); - Handle<ObjectHashTable> table = isolate->factory()->NewObjectHashTable(0); + Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 0); weak_collection->set_table(*table); weak_collection->set_next(Smi::FromInt(0)); - return *weak_collection; + return weak_collection; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakCollectionInitialize) { +RUNTIME_FUNCTION(Runtime_WeakCollectionInitialize) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSWeakCollection, weak_collection, 0); - return WeakCollectionInitialize(isolate, weak_collection); + return *WeakCollectionInitialize(isolate, weak_collection); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakCollectionGet) { +RUNTIME_FUNCTION(Runtime_WeakCollectionGet) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSWeakCollection, weak_collection, 0); CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); Handle<ObjectHashTable> table( ObjectHashTable::cast(weak_collection->table())); - Handle<Object> lookup(table->Lookup(*key), isolate); + Handle<Object> lookup(table->Lookup(key), isolate); return lookup->IsTheHole() ? 
isolate->heap()->undefined_value() : *lookup; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakCollectionHas) { +RUNTIME_FUNCTION(Runtime_WeakCollectionHas) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSWeakCollection, weak_collection, 0); CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); Handle<ObjectHashTable> table( ObjectHashTable::cast(weak_collection->table())); - Handle<Object> lookup(table->Lookup(*key), isolate); + Handle<Object> lookup(table->Lookup(key), isolate); return isolate->heap()->ToBoolean(!lookup->IsTheHole()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakCollectionDelete) { +RUNTIME_FUNCTION(Runtime_WeakCollectionDelete) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSWeakCollection, weak_collection, 0); CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); Handle<ObjectHashTable> table(ObjectHashTable::cast( weak_collection->table())); - Handle<Object> lookup(table->Lookup(*key), isolate); + Handle<Object> lookup(table->Lookup(key), isolate); Handle<ObjectHashTable> new_table = ObjectHashTable::Put(table, key, isolate->factory()->the_hole_value()); weak_collection->set_table(*new_table); @@ -1701,12 +1771,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakCollectionDelete) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakCollectionSet) { +RUNTIME_FUNCTION(Runtime_WeakCollectionSet) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(JSWeakCollection, weak_collection, 0); CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); - Handle<Object> value(args[2], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, value, 2); Handle<ObjectHashTable> table( ObjectHashTable::cast(weak_collection->table())); Handle<ObjectHashTable> new_table = ObjectHashTable::Put(table, key, value); @@ -1715,16 +1785,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakCollectionSet) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ClassOf) { +RUNTIME_FUNCTION(Runtime_ClassOf) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); - Object* obj = args[0]; + CONVERT_ARG_CHECKED(Object, obj, 0); if (!obj->IsJSObject()) return isolate->heap()->null_value(); return JSObject::cast(obj)->class_name(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPrototype) { +RUNTIME_FUNCTION(Runtime_GetPrototype) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(Object, obj, 0); @@ -1732,54 +1802,52 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPrototype) { ASSERT(!obj->IsAccessCheckNeeded() || obj->IsJSObject()); do { if (obj->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(Handle<JSObject>::cast(obj), - isolate->factory()->proto_string(), - v8::ACCESS_GET)) { - isolate->ReportFailedAccessCheckWrapper(Handle<JSObject>::cast(obj), - v8::ACCESS_GET); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + !isolate->MayNamedAccess(Handle<JSObject>::cast(obj), + isolate->factory()->proto_string(), + v8::ACCESS_GET)) { + isolate->ReportFailedAccessCheck(Handle<JSObject>::cast(obj), + v8::ACCESS_GET); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); return isolate->heap()->undefined_value(); } - obj = handle(obj->GetPrototype(isolate), isolate); + obj = Object::GetPrototype(isolate, obj); } while (obj->IsJSObject() && JSObject::cast(*obj)->map()->is_hidden_prototype()); return *obj; } -static inline Object* GetPrototypeSkipHiddenPrototypes(Isolate* isolate, - Object* receiver) { - Object* current = receiver->GetPrototype(isolate); +static inline Handle<Object> GetPrototypeSkipHiddenPrototypes( + Isolate* isolate, 
Handle<Object> receiver) { + Handle<Object> current = Object::GetPrototype(isolate, receiver); while (current->IsJSObject() && - JSObject::cast(current)->map()->is_hidden_prototype()) { - current = current->GetPrototype(isolate); + JSObject::cast(*current)->map()->is_hidden_prototype()) { + current = Object::GetPrototype(isolate, current); } return current; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetPrototype) { +RUNTIME_FUNCTION(Runtime_SetPrototype) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0); CONVERT_ARG_HANDLE_CHECKED(Object, prototype, 1); if (obj->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(obj, - isolate->factory()->proto_string(), - v8::ACCESS_SET)) { - isolate->ReportFailedAccessCheckWrapper(obj, v8::ACCESS_SET); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + !isolate->MayNamedAccess( + obj, isolate->factory()->proto_string(), v8::ACCESS_SET)) { + isolate->ReportFailedAccessCheck(obj, v8::ACCESS_SET); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); return isolate->heap()->undefined_value(); } if (obj->map()->is_observed()) { - Handle<Object> old_value( - GetPrototypeSkipHiddenPrototypes(isolate, *obj), isolate); - - Handle<Object> result = JSObject::SetPrototype(obj, prototype, true); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> old_value = GetPrototypeSkipHiddenPrototypes(isolate, obj); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSObject::SetPrototype(obj, prototype, true)); - Handle<Object> new_value( - GetPrototypeSkipHiddenPrototypes(isolate, *obj), isolate); + Handle<Object> new_value = GetPrototypeSkipHiddenPrototypes(isolate, obj); if (!new_value->SameValue(*old_value)) { JSObject::EnqueueChangeRecord(obj, "setPrototype", isolate->factory()->proto_string(), @@ -1787,22 +1855,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetPrototype) { } return *result; } - Handle<Object> result = JSObject::SetPrototype(obj, prototype, true); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSObject::SetPrototype(obj, prototype, true)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsInPrototypeChain) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_IsInPrototypeChain) { + HandleScope shs(isolate); ASSERT(args.length() == 2); // See ECMA-262, section 15.3.5.3, page 88 (steps 5 - 8). - Object* O = args[0]; - Object* V = args[1]; + CONVERT_ARG_HANDLE_CHECKED(Object, O, 0); + CONVERT_ARG_HANDLE_CHECKED(Object, V, 1); while (true) { - Object* prototype = V->GetPrototype(isolate); + Handle<Object> prototype = Object::GetPrototype(isolate, V); if (prototype->IsNull()) return isolate->heap()->false_value(); - if (O == prototype) return isolate->heap()->true_value(); + if (*O == *prototype) return isolate->heap()->true_value(); V = prototype; } } @@ -1811,6 +1881,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsInPrototypeChain) { static bool CheckAccessException(Object* callback, v8::AccessType access_type) { DisallowHeapAllocation no_gc; + ASSERT(!callback->IsForeign()); if (callback->IsAccessorInfo()) { AccessorInfo* info = AccessorInfo::cast(callback); return @@ -1866,22 +1937,22 @@ static AccessCheckResult CheckPropertyAccess(Handle<JSObject> obj, if (name->AsArrayIndex(&index)) { // TODO(1095): we should traverse hidden prototype hierachy as well. 
if (CheckGenericAccess( - obj, obj, index, access_type, &Isolate::MayIndexedAccessWrapper)) { + obj, obj, index, access_type, &Isolate::MayIndexedAccess)) { return ACCESS_ALLOWED; } - obj->GetIsolate()->ReportFailedAccessCheckWrapper(obj, access_type); + obj->GetIsolate()->ReportFailedAccessCheck(obj, access_type); return ACCESS_FORBIDDEN; } Isolate* isolate = obj->GetIsolate(); LookupResult lookup(isolate); - obj->LocalLookup(*name, &lookup, true); + obj->LocalLookup(name, &lookup, true); if (!lookup.IsProperty()) return ACCESS_ABSENT; Handle<JSObject> holder(lookup.holder(), isolate); if (CheckGenericAccess<Handle<Object> >( - obj, holder, name, access_type, &Isolate::MayNamedAccessWrapper)) { + obj, holder, name, access_type, &Isolate::MayNamedAccess)) { return ACCESS_ALLOWED; } @@ -1898,7 +1969,7 @@ static AccessCheckResult CheckPropertyAccess(Handle<JSObject> obj, case INTERCEPTOR: // If the object has an interceptor, try real named properties. // Overwrite the result to fetch the correct property later. - holder->LookupRealNamedProperty(*name, &lookup); + holder->LookupRealNamedProperty(name, &lookup); if (lookup.IsProperty() && lookup.IsPropertyCallbacks()) { if (CheckAccessException(lookup.GetCallbackObject(), access_type)) { return ACCESS_ALLOWED; @@ -1909,7 +1980,7 @@ static AccessCheckResult CheckPropertyAccess(Handle<JSObject> obj, break; } - isolate->ReportFailedAccessCheckWrapper(obj, access_type); + isolate->ReportFailedAccessCheck(obj, access_type); return ACCESS_FORBIDDEN; } @@ -1927,16 +1998,16 @@ enum PropertyDescriptorIndices { }; -static Handle<Object> GetOwnProperty(Isolate* isolate, - Handle<JSObject> obj, - Handle<Name> name) { +MUST_USE_RESULT static MaybeHandle<Object> GetOwnProperty(Isolate* isolate, + Handle<JSObject> obj, + Handle<Name> name) { Heap* heap = isolate->heap(); Factory* factory = isolate->factory(); // Due to some WebKit tests, we want to make sure that we do not log // more than one access failure here. AccessCheckResult access_check_result = CheckPropertyAccess(obj, name, v8::ACCESS_HAS); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); switch (access_check_result) { case ACCESS_FORBIDDEN: return factory->false_value(); case ACCESS_ALLOWED: break; @@ -1945,22 +2016,25 @@ static Handle<Object> GetOwnProperty(Isolate* isolate, PropertyAttributes attrs = JSReceiver::GetLocalPropertyAttribute(obj, name); if (attrs == ABSENT) { - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); return factory->undefined_value(); } ASSERT(!isolate->has_scheduled_exception()); - AccessorPair* raw_accessors = obj->GetLocalPropertyAccessorPair(*name); - Handle<AccessorPair> accessors(raw_accessors, isolate); + Handle<AccessorPair> accessors; + bool has_accessors = + JSObject::GetLocalPropertyAccessorPair(obj, name).ToHandle(&accessors); Handle<FixedArray> elms = isolate->factory()->NewFixedArray(DESCRIPTOR_SIZE); elms->set(ENUMERABLE_INDEX, heap->ToBoolean((attrs & DONT_ENUM) == 0)); elms->set(CONFIGURABLE_INDEX, heap->ToBoolean((attrs & DONT_DELETE) == 0)); - elms->set(IS_ACCESSOR_INDEX, heap->ToBoolean(raw_accessors != NULL)); + elms->set(IS_ACCESSOR_INDEX, heap->ToBoolean(has_accessors)); - if (raw_accessors == NULL) { + if (!has_accessors) { elms->set(WRITABLE_INDEX, heap->ToBoolean((attrs & READ_ONLY) == 0)); - // GetProperty does access check. 
- Handle<Object> value = GetProperty(isolate, obj, name); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, value, Handle<Object>::null()); + // Runtime::GetObjectProperty does access check. + Handle<Object> value; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, value, Runtime::GetObjectProperty(isolate, obj, name), + Object); elms->set(VALUE_INDEX, *value); } else { // Access checks are performed for both accessors separately. @@ -1972,14 +2046,14 @@ static Handle<Object> GetOwnProperty(Isolate* isolate, ASSERT(!isolate->has_scheduled_exception()); elms->set(GETTER_INDEX, *getter); } else { - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); } if (!setter->IsMap() && CheckPropertyAccess(obj, name, v8::ACCESS_SET)) { ASSERT(!isolate->has_scheduled_exception()); elms->set(SETTER_INDEX, *setter); } else { - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); } } @@ -1994,28 +2068,30 @@ static Handle<Object> GetOwnProperty(Isolate* isolate, // [false, value, Writeable, Enumerable, Configurable] // if args[1] is an accessor on args[0] // [true, GetFunction, SetFunction, Enumerable, Configurable] -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) { +RUNTIME_FUNCTION(Runtime_GetOwnProperty) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0); CONVERT_ARG_HANDLE_CHECKED(Name, name, 1); - Handle<Object> result = GetOwnProperty(isolate, obj, name); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, GetOwnProperty(isolate, obj, name)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_PreventExtensions) { +RUNTIME_FUNCTION(Runtime_PreventExtensions) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0); - Handle<Object> result = JSObject::PreventExtensions(obj); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, JSObject::PreventExtensions(obj)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsExtensible) { +RUNTIME_FUNCTION(Runtime_IsExtensible) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSObject, obj, 0); @@ -2029,44 +2105,46 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsExtensible) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpCompile) { +RUNTIME_FUNCTION(Runtime_RegExpCompile) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(JSRegExp, re, 0); CONVERT_ARG_HANDLE_CHECKED(String, pattern, 1); CONVERT_ARG_HANDLE_CHECKED(String, flags, 2); - Handle<Object> result = RegExpImpl::Compile(re, pattern, flags); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, RegExpImpl::Compile(re, pattern, flags)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateApiFunction) { +RUNTIME_FUNCTION(Runtime_CreateApiFunction) { HandleScope scope(isolate); - ASSERT(args.length() == 1); + ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(FunctionTemplateInfo, data, 0); - return *isolate->factory()->CreateApiFunction(data); + CONVERT_ARG_HANDLE_CHECKED(Object, prototype, 1); + return *isolate->factory()->CreateApiFunction(data, prototype); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsTemplate) { +RUNTIME_FUNCTION(Runtime_IsTemplate) { SealHandleScope shs(isolate); ASSERT(args.length() 
== 1); - Object* arg = args[0]; + CONVERT_ARG_HANDLE_CHECKED(Object, arg, 0); bool result = arg->IsObjectTemplateInfo() || arg->IsFunctionTemplateInfo(); return isolate->heap()->ToBoolean(result); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetTemplateField) { +RUNTIME_FUNCTION(Runtime_GetTemplateField) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); CONVERT_ARG_CHECKED(HeapObject, templ, 0); - CONVERT_SMI_ARG_CHECKED(index, 1) + CONVERT_SMI_ARG_CHECKED(index, 1); int offset = index * kPointerSize + HeapObject::kHeaderSize; InstanceType type = templ->map()->instance_type(); - RUNTIME_ASSERT(type == FUNCTION_TEMPLATE_INFO_TYPE || - type == OBJECT_TEMPLATE_INFO_TYPE); + RUNTIME_ASSERT(type == FUNCTION_TEMPLATE_INFO_TYPE || + type == OBJECT_TEMPLATE_INFO_TYPE); RUNTIME_ASSERT(offset > 0); if (type == FUNCTION_TEMPLATE_INFO_TYPE) { RUNTIME_ASSERT(offset < FunctionTemplateInfo::kSize); @@ -2077,38 +2155,40 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetTemplateField) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DisableAccessChecks) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_DisableAccessChecks) { + HandleScope scope(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(HeapObject, object, 0); - Map* old_map = object->map(); + CONVERT_ARG_HANDLE_CHECKED(HeapObject, object, 0); + Handle<Map> old_map(object->map()); bool needs_access_checks = old_map->is_access_check_needed(); if (needs_access_checks) { // Copy map so it won't interfere constructor's initial map. - Map* new_map; - MaybeObject* maybe_new_map = old_map->Copy(); - if (!maybe_new_map->To(&new_map)) return maybe_new_map; - + Handle<Map> new_map = Map::Copy(old_map); new_map->set_is_access_check_needed(false); - object->set_map(new_map); + if (object->IsJSObject()) { + JSObject::MigrateToMap(Handle<JSObject>::cast(object), new_map); + } else { + object->set_map(*new_map); + } } return isolate->heap()->ToBoolean(needs_access_checks); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_EnableAccessChecks) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_EnableAccessChecks) { + HandleScope scope(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(HeapObject, object, 0); - Map* old_map = object->map(); + CONVERT_ARG_HANDLE_CHECKED(HeapObject, object, 0); + Handle<Map> old_map(object->map()); if (!old_map->is_access_check_needed()) { // Copy map so it won't interfere constructor's initial map. 
- Map* new_map; - MaybeObject* maybe_new_map = old_map->Copy(); - if (!maybe_new_map->To(&new_map)) return maybe_new_map; - + Handle<Map> new_map = Map::Copy(old_map); new_map->set_is_access_check_needed(true); - object->set_map(new_map); + if (object->IsJSObject()) { + JSObject::MigrateToMap(Handle<JSObject>::cast(object), new_map); + } else { + object->set_map(*new_map); + } } return isolate->heap()->undefined_value(); } @@ -2124,7 +2204,7 @@ static Handle<Object> InstantiateAccessorComponent(Isolate* isolate, } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAccessorProperty) { +RUNTIME_FUNCTION(Runtime_SetAccessorProperty) { HandleScope scope(isolate); ASSERT(args.length() == 6); CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); @@ -2135,6 +2215,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAccessorProperty) { CONVERT_SMI_ARG_CHECKED(access_control, 5); RUNTIME_ASSERT(getter->IsUndefined() || getter->IsFunctionTemplateInfo()); RUNTIME_ASSERT(setter->IsUndefined() || setter->IsFunctionTemplateInfo()); + RUNTIME_ASSERT(PropertyDetails::AttributesField::is_valid( + static_cast<PropertyAttributes>(attribute))); JSObject::DefineAccessor(object, name, InstantiateAccessorComponent(isolate, getter), @@ -2145,26 +2227,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAccessorProperty) { } -static Failure* ThrowRedeclarationError(Isolate* isolate, - const char* type, - Handle<String> name) { +static Object* ThrowRedeclarationError(Isolate* isolate, Handle<String> name) { HandleScope scope(isolate); - Handle<Object> type_handle = - isolate->factory()->NewStringFromAscii(CStrVector(type)); - Handle<Object> args[2] = { type_handle, name }; - Handle<Object> error = - isolate->factory()->NewTypeError("redeclaration", HandleVector(args, 2)); + Handle<Object> args[1] = { name }; + Handle<Object> error = isolate->factory()->NewTypeError( + "var_redeclaration", HandleVector(args, 1)); return isolate->Throw(*error); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareGlobals) { +RUNTIME_FUNCTION(RuntimeHidden_DeclareGlobals) { HandleScope scope(isolate); ASSERT(args.length() == 3); Handle<GlobalObject> global = Handle<GlobalObject>( isolate->context()->global_object()); - Handle<Context> context = args.at<Context>(0); + CONVERT_ARG_HANDLE_CHECKED(Context, context, 0); CONVERT_ARG_HANDLE_CHECKED(FixedArray, pairs, 1); CONVERT_SMI_ARG_CHECKED(flags, 2); @@ -2188,7 +2266,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareGlobals) { // value of the variable if the property is already there. // Do the lookup locally only, see ES5 erratum. LookupResult lookup(isolate); - global->LocalLookup(*name, &lookup, true); + global->LocalLookup(name, &lookup, true); if (lookup.IsFound()) { // We found an existing property. Unless it was an interceptor // that claims the property is absent, skip this declaration. @@ -2208,7 +2286,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareGlobals) { } LookupResult lookup(isolate); - global->LocalLookup(*name, &lookup, true); + global->LocalLookup(name, &lookup, true); // Compute the property attributes. According to ECMA-262, // the property must be non-configurable except in eval. 
@@ -2230,18 +2308,19 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareGlobals) { if (lookup.IsFound() && lookup.IsDontDelete()) { if (lookup.IsReadOnly() || lookup.IsDontEnum() || lookup.IsPropertyCallbacks()) { - return ThrowRedeclarationError(isolate, "function", name); + return ThrowRedeclarationError(isolate, name); } // If the existing property is not configurable, keep its attributes. attr = lookup.GetAttributes(); } // Define or redefine own property. - RETURN_IF_EMPTY_HANDLE(isolate, + RETURN_FAILURE_ON_EXCEPTION(isolate, JSObject::SetLocalPropertyIgnoreAttributes( global, name, value, static_cast<PropertyAttributes>(attr))); } else { // Do a [[Put]] on the existing (own) property. - RETURN_IF_EMPTY_HANDLE(isolate, + RETURN_FAILURE_ON_EXCEPTION( + isolate, JSObject::SetProperty( global, name, value, static_cast<PropertyAttributes>(attr), strict_mode)); @@ -2253,20 +2332,20 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareGlobals) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareContextSlot) { +RUNTIME_FUNCTION(RuntimeHidden_DeclareContextSlot) { HandleScope scope(isolate); ASSERT(args.length() == 4); // Declarations are always made in a function or native context. In the // case of eval code, the context passed is the context of the caller, // which may be some nested context and not the declaration context. - RUNTIME_ASSERT(args[0]->IsContext()); - Handle<Context> context(Context::cast(args[0])->declaration_context()); - - Handle<String> name(String::cast(args[1])); - PropertyAttributes mode = static_cast<PropertyAttributes>(args.smi_at(2)); + CONVERT_ARG_HANDLE_CHECKED(Context, context_arg, 0); + Handle<Context> context(context_arg->declaration_context()); + CONVERT_ARG_HANDLE_CHECKED(String, name, 1); + CONVERT_SMI_ARG_CHECKED(mode_arg, 2); + PropertyAttributes mode = static_cast<PropertyAttributes>(mode_arg); RUNTIME_ASSERT(mode == READ_ONLY || mode == NONE); - Handle<Object> initial_value(args[3], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, initial_value, 3); int index; PropertyAttributes attributes; @@ -2282,8 +2361,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareContextSlot) { if (((attributes & READ_ONLY) != 0) || (mode == READ_ONLY)) { // Functions are not read-only. ASSERT(mode != READ_ONLY || initial_value->IsTheHole()); - const char* type = ((attributes & READ_ONLY) != 0) ? "const" : "var"; - return ThrowRedeclarationError(isolate, type, name); + return ThrowRedeclarationError(isolate, name); } // Initialize it if necessary. @@ -2298,7 +2376,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareContextSlot) { // Slow case: The property is in the context extension object of a // function context or the global object of a native context. Handle<JSObject> object = Handle<JSObject>::cast(holder); - RETURN_IF_EMPTY_HANDLE( + RETURN_FAILURE_ON_EXCEPTION( isolate, JSReceiver::SetProperty(object, name, initial_value, mode, SLOPPY)); } @@ -2335,17 +2413,17 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareContextSlot) { if (initial_value->IsTheHole() && !object->IsJSContextExtensionObject()) { LookupResult lookup(isolate); - object->Lookup(*name, &lookup); + object->Lookup(name, &lookup); if (lookup.IsPropertyCallbacks()) { - return ThrowRedeclarationError(isolate, "const", name); + return ThrowRedeclarationError(isolate, name); } } if (object->IsJSGlobalObject()) { // Define own property on the global object. 
- RETURN_IF_EMPTY_HANDLE(isolate, + RETURN_FAILURE_ON_EXCEPTION(isolate, JSObject::SetLocalPropertyIgnoreAttributes(object, name, value, mode)); } else { - RETURN_IF_EMPTY_HANDLE(isolate, + RETURN_FAILURE_ON_EXCEPTION(isolate, JSReceiver::SetProperty(object, name, value, mode, SLOPPY)); } } @@ -2354,7 +2432,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareContextSlot) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) { +RUNTIME_FUNCTION(Runtime_InitializeVarGlobal) { HandleScope scope(isolate); // args[0] == name // args[1] == language_mode @@ -2366,7 +2444,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) { bool assign = args.length() == 3; CONVERT_ARG_HANDLE_CHECKED(String, name, 0); - RUNTIME_ASSERT(args[1]->IsSmi()); CONVERT_STRICT_MODE_ARG_CHECKED(strict_mode, 1); // According to ECMA-262, section 12.2, page 62, the property must @@ -2381,7 +2458,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) { // Note that objects can have hidden prototypes, so we need to traverse // the whole chain of hidden prototypes to do a 'local' lookup. LookupResult lookup(isolate); - isolate->context()->global_object()->LocalLookup(*name, &lookup, true); + isolate->context()->global_object()->LocalLookup(name, &lookup, true); if (lookup.IsInterceptor()) { Handle<JSObject> holder(lookup.holder()); PropertyAttributes intercepted = @@ -2390,9 +2467,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) { // Found an interceptor that's not read only. if (assign) { CONVERT_ARG_HANDLE_CHECKED(Object, value, 2); - Handle<Object> result = JSObject::SetPropertyForResult( - holder, &lookup, name, value, attributes, strict_mode); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSObject::SetPropertyForResult( + holder, &lookup, name, value, attributes, strict_mode)); return *result; } else { return isolate->heap()->undefined_value(); @@ -2403,23 +2482,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) { if (assign) { CONVERT_ARG_HANDLE_CHECKED(Object, value, 2); Handle<GlobalObject> global(isolate->context()->global_object()); - Handle<Object> result = JSReceiver::SetProperty( - global, name, value, attributes, strict_mode); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSReceiver::SetProperty(global, name, value, attributes, strict_mode)); return *result; } return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InitializeConstGlobal) { +RUNTIME_FUNCTION(RuntimeHidden_InitializeConstGlobal) { SealHandleScope shs(isolate); // All constants are declared with an initial value. The name // of the constant is the first argument and the initial value // is the second. RUNTIME_ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(String, name, 0); - Handle<Object> value = args.at<Object>(1); + CONVERT_ARG_HANDLE_CHECKED(Object, value, 1); // Get the current global object from top. GlobalObject* global = isolate->context()->global_object(); @@ -2435,11 +2515,11 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InitializeConstGlobal) { // prototype chain (this rules out using SetProperty). 
// We use SetLocalPropertyIgnoreAttributes instead LookupResult lookup(isolate); - global->LocalLookup(*name, &lookup); + global->LocalLookup(name, &lookup); if (!lookup.IsFound()) { HandleScope handle_scope(isolate); Handle<GlobalObject> global(isolate->context()->global_object()); - RETURN_IF_EMPTY_HANDLE( + RETURN_FAILURE_ON_EXCEPTION( isolate, JSObject::SetLocalPropertyIgnoreAttributes(global, name, value, attributes)); @@ -2456,7 +2536,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InitializeConstGlobal) { // property through an interceptor and only do it if it's // uninitialized, e.g. the hole. Nirk... // Passing sloppy mode because the property is writable. - RETURN_IF_EMPTY_HANDLE( + RETURN_FAILURE_ON_EXCEPTION( isolate, JSReceiver::SetProperty(global, name, value, attributes, SLOPPY)); return *value; @@ -2489,18 +2569,16 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InitializeConstGlobal) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InitializeConstContextSlot) { +RUNTIME_FUNCTION(RuntimeHidden_InitializeConstContextSlot) { HandleScope scope(isolate); ASSERT(args.length() == 3); - Handle<Object> value(args[0], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, value, 0); ASSERT(!value->IsTheHole()); - // Initializations are always done in a function or native context. - RUNTIME_ASSERT(args[1]->IsContext()); - Handle<Context> context(Context::cast(args[1])->declaration_context()); - - Handle<String> name(String::cast(args[2])); + CONVERT_ARG_HANDLE_CHECKED(Context, context_arg, 1); + Handle<Context> context(context_arg->declaration_context()); + CONVERT_ARG_HANDLE_CHECKED(String, name, 2); int index; PropertyAttributes attributes; @@ -2526,7 +2604,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InitializeConstContextSlot) { Handle<JSObject> global = Handle<JSObject>( isolate->context()->global_object()); // Strict mode not needed (const disallowed in strict mode). - RETURN_IF_EMPTY_HANDLE( + RETURN_FAILURE_ON_EXCEPTION( isolate, JSReceiver::SetProperty(global, name, value, NONE, SLOPPY)); return *value; @@ -2553,7 +2631,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InitializeConstContextSlot) { // Set it if it hasn't been set before. NOTE: We cannot use // GetProperty() to get the current value as it 'unholes' the value. LookupResult lookup(isolate); - object->LocalLookupRealNamedProperty(*name, &lookup); + object->LocalLookupRealNamedProperty(name, &lookup); ASSERT(lookup.IsFound()); // the property was declared ASSERT(lookup.IsReadOnly()); // and it was declared as read-only @@ -2577,7 +2655,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InitializeConstContextSlot) { // read-only property. if ((attributes & READ_ONLY) == 0) { // Strict mode not needed (const disallowed in strict mode). - RETURN_IF_EMPTY_HANDLE( + RETURN_FAILURE_ON_EXCEPTION( isolate, JSReceiver::SetProperty(object, name, value, attributes, SLOPPY)); } @@ -2587,12 +2665,13 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InitializeConstContextSlot) { } -RUNTIME_FUNCTION(MaybeObject*, - Runtime_OptimizeObjectForAddingMultipleProperties) { +RUNTIME_FUNCTION(Runtime_OptimizeObjectForAddingMultipleProperties) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); CONVERT_SMI_ARG_CHECKED(properties, 1); + // Conservative upper limit to prevent fuzz tests from going OOM. 
+ RUNTIME_ASSERT(properties <= 100000); if (object->HasFastProperties() && !object->IsJSGlobalProxy()) { JSObject::NormalizeProperties(object, KEEP_INOBJECT_PROPERTIES, properties); } @@ -2600,7 +2679,7 @@ RUNTIME_FUNCTION(MaybeObject*, } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_RegExpExec) { +RUNTIME_FUNCTION(RuntimeHidden_RegExpExec) { HandleScope scope(isolate); ASSERT(args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(JSRegExp, regexp, 0); @@ -2612,52 +2691,36 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_RegExpExec) { RUNTIME_ASSERT(index >= 0); RUNTIME_ASSERT(index <= subject->length()); isolate->counters()->regexp_entry_runtime()->Increment(); - Handle<Object> result = RegExpImpl::Exec(regexp, - subject, - index, - last_match_info); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + RegExpImpl::Exec(regexp, subject, index, last_match_info)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_RegExpConstructResult) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_RegExpConstructResult) { + HandleScope handle_scope(isolate); ASSERT(args.length() == 3); - CONVERT_SMI_ARG_CHECKED(elements_count, 0); - if (elements_count < 0 || - elements_count > FixedArray::kMaxLength || - !Smi::IsValid(elements_count)) { - return isolate->ThrowIllegalOperation(); - } - Object* new_object; - { MaybeObject* maybe_new_object = - isolate->heap()->AllocateFixedArray(elements_count); - if (!maybe_new_object->ToObject(&new_object)) return maybe_new_object; - } - FixedArray* elements = FixedArray::cast(new_object); - { MaybeObject* maybe_new_object = isolate->heap()->AllocateRaw( - JSRegExpResult::kSize, NEW_SPACE, OLD_POINTER_SPACE); - if (!maybe_new_object->ToObject(&new_object)) return maybe_new_object; - } - { - DisallowHeapAllocation no_gc; - HandleScope scope(isolate); - reinterpret_cast<HeapObject*>(new_object)-> - set_map(isolate->native_context()->regexp_result_map()); - } - JSArray* array = JSArray::cast(new_object); - array->set_properties(isolate->heap()->empty_fixed_array()); - array->set_elements(elements); - array->set_length(Smi::FromInt(elements_count)); + CONVERT_SMI_ARG_CHECKED(size, 0); + RUNTIME_ASSERT(size >= 0 && size <= FixedArray::kMaxLength); + CONVERT_ARG_HANDLE_CHECKED(Object, index, 1); + CONVERT_ARG_HANDLE_CHECKED(Object, input, 2); + Handle<FixedArray> elements = isolate->factory()->NewFixedArray(size); + Handle<Map> regexp_map(isolate->native_context()->regexp_result_map()); + Handle<JSObject> object = + isolate->factory()->NewJSObjectFromMap(regexp_map, NOT_TENURED, false); + Handle<JSArray> array = Handle<JSArray>::cast(object); + array->set_elements(*elements); + array->set_length(Smi::FromInt(size)); // Write in-object properties after the length of the array. 
- array->InObjectPropertyAtPut(JSRegExpResult::kIndexIndex, args[1]); - array->InObjectPropertyAtPut(JSRegExpResult::kInputIndex, args[2]); - return array; + array->InObjectPropertyAtPut(JSRegExpResult::kIndexIndex, *index); + array->InObjectPropertyAtPut(JSRegExpResult::kInputIndex, *input); + return *array; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpInitializeObject) { +RUNTIME_FUNCTION(Runtime_RegExpInitializeObject) { HandleScope scope(isolate); ASSERT(args.length() == 5); CONVERT_ARG_HANDLE_CHECKED(JSRegExp, regexp, 0); @@ -2701,24 +2764,27 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpInitializeObject) { static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE); Handle<Object> zero(Smi::FromInt(0), isolate); Factory* factory = isolate->factory(); - CHECK_NOT_EMPTY_HANDLE(isolate, JSObject::SetLocalPropertyIgnoreAttributes( - regexp, factory->source_string(), source, final)); - CHECK_NOT_EMPTY_HANDLE(isolate, JSObject::SetLocalPropertyIgnoreAttributes( - regexp, factory->global_string(), global, final)); - CHECK_NOT_EMPTY_HANDLE(isolate, JSObject::SetLocalPropertyIgnoreAttributes( - regexp, factory->ignore_case_string(), ignoreCase, final)); - CHECK_NOT_EMPTY_HANDLE(isolate, JSObject::SetLocalPropertyIgnoreAttributes( - regexp, factory->multiline_string(), multiline, final)); - CHECK_NOT_EMPTY_HANDLE(isolate, JSObject::SetLocalPropertyIgnoreAttributes( - regexp, factory->last_index_string(), zero, writable)); + JSObject::SetLocalPropertyIgnoreAttributes( + regexp, factory->source_string(), source, final).Check(); + JSObject::SetLocalPropertyIgnoreAttributes( + regexp, factory->global_string(), global, final).Check(); + JSObject::SetLocalPropertyIgnoreAttributes( + regexp, factory->ignore_case_string(), ignoreCase, final).Check(); + JSObject::SetLocalPropertyIgnoreAttributes( + regexp, factory->multiline_string(), multiline, final).Check(); + JSObject::SetLocalPropertyIgnoreAttributes( + regexp, factory->last_index_string(), zero, writable).Check(); return *regexp; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FinishArrayPrototypeSetup) { +RUNTIME_FUNCTION(Runtime_FinishArrayPrototypeSetup) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSArray, prototype, 0); + Object* length = prototype->length(); + RUNTIME_ASSERT(length->IsSmi() && Smi::cast(length)->value() == 0); + RUNTIME_ASSERT(prototype->HasFastSmiOrObjectElements()); // This is necessary to enable fast checks for absence of elements // on Array.prototype and below. 
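Illustrative aside, not part of the diff: RuntimeHidden_RegExpConstructResult above now rejects out-of-range sizes up front with RUNTIME_ASSERT and then builds the result through handle-returning factory calls. A rough standalone sketch of that validate-then-allocate-then-fill shape; RegExpResult, kMaxLength and BuildRegExpResult are invented names, and the cap is not V8's actual constant.

#include <optional>
#include <string>
#include <vector>

// Invented stand-in for a JSRegExpResult: a match array plus the two
// in-object properties the real code writes after the length (index, input).
struct RegExpResult {
  std::vector<std::string> elements;
  int index;
  std::string input;
};

constexpr int kMaxLength = 1 << 20;  // illustrative cap only

// Mirrors the post-patch flow: reject bad sizes before touching the
// allocator, then allocate and fill in one straight-line path.
std::optional<RegExpResult> BuildRegExpResult(int size, int index,
                                              const std::string& input) {
  if (size < 0 || size > kMaxLength) return std::nullopt;  // RUNTIME_ASSERT
  RegExpResult result;
  result.elements.resize(size);  // NewFixedArray(size) analogue
  result.index = index;          // kIndexIndex in-object property analogue
  result.input = input;          // kInputIndex in-object property analogue
  return result;
}

int main() {
  auto ok = BuildRegExpResult(3, 7, "abcabc");
  auto bad = BuildRegExpResult(-1, 0, "");
  return (ok.has_value() && !bad.has_value()) ? 0 : 1;
}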
prototype->set_elements(isolate->heap()->empty_fixed_array()); @@ -2733,18 +2799,19 @@ static Handle<JSFunction> InstallBuiltin(Isolate* isolate, Handle<String> key = isolate->factory()->InternalizeUtf8String(name); Handle<Code> code(isolate->builtins()->builtin(builtin_name)); Handle<JSFunction> optimized = - isolate->factory()->NewFunction(key, + isolate->factory()->NewFunction(MaybeHandle<Object>(), + key, JS_OBJECT_TYPE, JSObject::kHeaderSize, code, false); optimized->shared()->DontAdaptArguments(); - JSReceiver::SetProperty(holder, key, optimized, NONE, STRICT); + JSReceiver::SetProperty(holder, key, optimized, NONE, STRICT).Assert(); return optimized; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SpecialArrayFunctions) { +RUNTIME_FUNCTION(Runtime_SpecialArrayFunctions) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSObject, holder, 0); @@ -2761,16 +2828,17 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SpecialArrayFunctions) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsSloppyModeFunction) { +RUNTIME_FUNCTION(Runtime_IsSloppyModeFunction) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSReceiver, callable, 0); if (!callable->IsJSFunction()) { HandleScope scope(isolate); - bool threw = false; - Handle<Object> delegate = Execution::TryGetFunctionDelegate( - isolate, Handle<JSReceiver>(callable), &threw); - if (threw) return Failure::Exception(); + Handle<Object> delegate; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, delegate, + Execution::TryGetFunctionDelegate( + isolate, Handle<JSReceiver>(callable))); callable = JSFunction::cast(*delegate); } JSFunction* function = JSFunction::cast(callable); @@ -2779,17 +2847,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsSloppyModeFunction) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDefaultReceiver) { +RUNTIME_FUNCTION(Runtime_GetDefaultReceiver) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSReceiver, callable, 0); if (!callable->IsJSFunction()) { HandleScope scope(isolate); - bool threw = false; - Handle<Object> delegate = Execution::TryGetFunctionDelegate( - isolate, Handle<JSReceiver>(callable), &threw); - if (threw) return Failure::Exception(); + Handle<Object> delegate; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, delegate, + Execution::TryGetFunctionDelegate( + isolate, Handle<JSReceiver>(callable))); callable = JSFunction::cast(*delegate); } JSFunction* function = JSFunction::cast(callable); @@ -2807,13 +2876,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDefaultReceiver) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_MaterializeRegExpLiteral) { +RUNTIME_FUNCTION(RuntimeHidden_MaterializeRegExpLiteral) { HandleScope scope(isolate); ASSERT(args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 0); - int index = args.smi_at(1); - Handle<String> pattern = args.at<String>(2); - Handle<String> flags = args.at<String>(3); + CONVERT_SMI_ARG_CHECKED(index, 1); + CONVERT_ARG_HANDLE_CHECKED(String, pattern, 2); + CONVERT_ARG_HANDLE_CHECKED(String, flags, 3); // Get the RegExp function from the context in the literals array. // This is the RegExp function from the context in which the @@ -2824,20 +2893,16 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_MaterializeRegExpLiteral) { Handle<JSFunction>( JSFunction::NativeContextFromLiterals(*literals)->regexp_function()); // Compute the regular expression literal. 
- bool has_pending_exception; - Handle<Object> regexp = - RegExpImpl::CreateRegExpLiteral(constructor, pattern, flags, - &has_pending_exception); - if (has_pending_exception) { - ASSERT(isolate->has_pending_exception()); - return Failure::Exception(); - } + Handle<Object> regexp; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, regexp, + RegExpImpl::CreateRegExpLiteral(constructor, pattern, flags)); literals->set(index, *regexp); return *regexp; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetName) { +RUNTIME_FUNCTION(Runtime_FunctionGetName) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); @@ -2846,7 +2911,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetName) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetName) { +RUNTIME_FUNCTION(Runtime_FunctionSetName) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); @@ -2857,7 +2922,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetName) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionNameShouldPrintAsAnonymous) { +RUNTIME_FUNCTION(Runtime_FunctionNameShouldPrintAsAnonymous) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSFunction, f, 0); @@ -2866,7 +2931,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionNameShouldPrintAsAnonymous) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionMarkNameShouldPrintAsAnonymous) { +RUNTIME_FUNCTION(Runtime_FunctionMarkNameShouldPrintAsAnonymous) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSFunction, f, 0); @@ -2875,7 +2940,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionMarkNameShouldPrintAsAnonymous) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsGenerator) { +RUNTIME_FUNCTION(Runtime_FunctionIsGenerator) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSFunction, f, 0); @@ -2883,18 +2948,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsGenerator) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionRemovePrototype) { +RUNTIME_FUNCTION(Runtime_FunctionRemovePrototype) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSFunction, f, 0); - f->RemovePrototype(); + RUNTIME_ASSERT(f->RemovePrototype()); return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetScript) { +RUNTIME_FUNCTION(Runtime_FunctionGetScript) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -2902,11 +2967,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetScript) { Handle<Object> script = Handle<Object>(fun->shared()->script(), isolate); if (!script->IsScript()) return isolate->heap()->undefined_value(); - return *GetScriptWrapper(Handle<Script>::cast(script)); + return *Script::GetWrapper(Handle<Script>::cast(script)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetSourceCode) { +RUNTIME_FUNCTION(Runtime_FunctionGetSourceCode) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -2916,7 +2981,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetSourceCode) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetScriptSourcePosition) { +RUNTIME_FUNCTION(Runtime_FunctionGetScriptSourcePosition) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); @@ -2926,7 +2991,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetScriptSourcePosition) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetPositionForOffset) { +RUNTIME_FUNCTION(Runtime_FunctionGetPositionForOffset) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); @@ -2940,7 +3005,7 @@ 
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetPositionForOffset) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetInstanceClassName) { +RUNTIME_FUNCTION(Runtime_FunctionSetInstanceClassName) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); @@ -2951,7 +3016,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetInstanceClassName) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetLength) { +RUNTIME_FUNCTION(Runtime_FunctionSetLength) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); @@ -2962,7 +3027,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetLength) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetPrototype) { +RUNTIME_FUNCTION(Runtime_FunctionSetPrototype) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -2974,35 +3039,36 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetPrototype) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetReadOnlyPrototype) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_FunctionSetReadOnlyPrototype) { + HandleScope shs(isolate); RUNTIME_ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(JSFunction, function, 0); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); - String* name = isolate->heap()->prototype_string(); + Handle<String> name = isolate->factory()->prototype_string(); if (function->HasFastProperties()) { // Construct a new field descriptor with updated attributes. - DescriptorArray* instance_desc = function->map()->instance_descriptors(); + Handle<DescriptorArray> instance_desc = + handle(function->map()->instance_descriptors()); - int index = instance_desc->SearchWithCache(name, function->map()); + int index = instance_desc->SearchWithCache(*name, function->map()); ASSERT(index != DescriptorArray::kNotFound); PropertyDetails details = instance_desc->GetDetails(index); - CallbacksDescriptor new_desc(name, - instance_desc->GetValue(index), + CallbacksDescriptor new_desc( + name, + handle(instance_desc->GetValue(index), isolate), static_cast<PropertyAttributes>(details.attributes() | READ_ONLY)); // Create a new map featuring the new field descriptors array. - Map* new_map; - MaybeObject* maybe_map = - function->map()->CopyReplaceDescriptor( - instance_desc, &new_desc, index, OMIT_TRANSITION); - if (!maybe_map->To(&new_map)) return maybe_map; + Handle<Map> map = handle(function->map()); + Handle<Map> new_map = Map::CopyReplaceDescriptor( + map, instance_desc, &new_desc, index, OMIT_TRANSITION); - function->set_map(new_map); + JSObject::MigrateToMap(function, new_map); } else { // Dictionary properties. // Directly manipulate the property details. 
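Another aside, outside the patch: Runtime_FunctionSetReadOnlyPrototype above keeps whatever attributes the 'prototype' property already has and adds READ_ONLY, either via a copied map with a replaced descriptor (fast properties) or by patching the dictionary entry in place. The attribute change itself is a bit-OR, as in this standalone sketch; the enum values here are chosen for the example, not taken from V8.

#include <cassert>
#include <cstdint>

// Illustrative property attribute bits (values picked for the sketch only).
enum PropertyAttributes : uint8_t {
  NONE = 0,
  READ_ONLY = 1 << 0,
  DONT_ENUM = 1 << 1,
  DONT_DELETE = 1 << 2,
};

struct PropertyDetails {
  uint8_t attributes;
};

// Keep the existing attributes and add READ_ONLY, as both the descriptor and
// dictionary paths do in the hunk above.
PropertyDetails MakeReadOnly(PropertyDetails details) {
  details.attributes = static_cast<uint8_t>(details.attributes | READ_ONLY);
  return details;
}

int main() {
  PropertyDetails details{static_cast<uint8_t>(DONT_ENUM | DONT_DELETE)};
  PropertyDetails frozen = MakeReadOnly(details);
  assert(frozen.attributes == (DONT_ENUM | DONT_DELETE | READ_ONLY));
}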
+ DisallowHeapAllocation no_gc; int entry = function->property_dictionary()->FindEntry(name); ASSERT(entry != NameDictionary::kNotFound); PropertyDetails details = function->property_dictionary()->DetailsAt(entry); @@ -3012,11 +3078,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetReadOnlyPrototype) { details.dictionary_index()); function->property_dictionary()->DetailsAtPut(entry, new_details); } - return function; + return *function; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsAPIFunction) { +RUNTIME_FUNCTION(Runtime_FunctionIsAPIFunction) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); @@ -3025,7 +3091,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsAPIFunction) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsBuiltin) { +RUNTIME_FUNCTION(Runtime_FunctionIsBuiltin) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); @@ -3034,21 +3100,19 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsBuiltin) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) { +RUNTIME_FUNCTION(Runtime_SetCode) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSFunction, target, 0); - Handle<Object> code = args.at<Object>(1); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, source, 1); - if (code->IsNull()) return *target; - RUNTIME_ASSERT(code->IsJSFunction()); - Handle<JSFunction> source = Handle<JSFunction>::cast(code); Handle<SharedFunctionInfo> target_shared(target->shared()); Handle<SharedFunctionInfo> source_shared(source->shared()); + RUNTIME_ASSERT(!source_shared->bound()); if (!Compiler::EnsureCompiled(source, KEEP_EXCEPTION)) { - return Failure::Exception(); + return isolate->heap()->exception(); } // Mark both, the source and the target, as un-flushable because the @@ -3063,6 +3127,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) { target_shared->ReplaceCode(source_shared->code()); target_shared->set_scope_info(source_shared->scope_info()); target_shared->set_length(source_shared->length()); + target_shared->set_feedback_vector(source_shared->feedback_vector()); target_shared->set_formal_parameter_count( source_shared->formal_parameter_count()); target_shared->set_script(source_shared->script()); @@ -3072,6 +3137,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) { bool was_native = target_shared->native(); target_shared->set_compiler_hints(source_shared->compiler_hints()); target_shared->set_native(was_native); + target_shared->set_profiler_ticks(source_shared->profiler_ticks()); // Set the code of the target function. 
target->ReplaceCode(source_shared->code()); @@ -3100,7 +3166,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetExpectedNumberOfProperties) { +RUNTIME_FUNCTION(Runtime_SetExpectedNumberOfProperties) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0); @@ -3114,9 +3180,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetExpectedNumberOfProperties) { if (!func->shared()->live_objects_may_exist()) { func->shared()->set_expected_nof_properties(num); if (func->has_initial_map()) { - Handle<Map> new_initial_map = - func->GetIsolate()->factory()->CopyMap( - Handle<Map>(func->initial_map())); + Handle<Map> new_initial_map = Map::Copy(handle(func->initial_map())); new_initial_map->set_unused_property_fields(num); func->set_initial_map(*new_initial_map); } @@ -3125,7 +3189,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetExpectedNumberOfProperties) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CreateJSGeneratorObject) { +RUNTIME_FUNCTION(RuntimeHidden_CreateJSGeneratorObject) { HandleScope scope(isolate); ASSERT(args.length() == 0); @@ -3151,10 +3215,10 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CreateJSGeneratorObject) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_SuspendJSGeneratorObject) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_SuspendJSGeneratorObject) { + HandleScope handle_scope(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(JSGeneratorObject, generator_object, 0); + CONVERT_ARG_HANDLE_CHECKED(JSGeneratorObject, generator_object, 0); JavaScriptFrameIterator stack_iterator(isolate); JavaScriptFrame* frame = stack_iterator.frame(); @@ -3183,11 +3247,10 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_SuspendJSGeneratorObject) { ASSERT(!frame->HasHandler()); } else { int stack_handler_index = -1; - MaybeObject* alloc = isolate->heap()->AllocateFixedArray(operands_count); - FixedArray* operand_stack; - if (!alloc->To(&operand_stack)) return alloc; - frame->SaveOperandStack(operand_stack, &stack_handler_index); - generator_object->set_operand_stack(operand_stack); + Handle<FixedArray> operand_stack = + isolate->factory()->NewFixedArray(operands_count); + frame->SaveOperandStack(*operand_stack, &stack_handler_index); + generator_object->set_operand_stack(*operand_stack); generator_object->set_stack_handler_index(stack_handler_index); } @@ -3202,7 +3265,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_SuspendJSGeneratorObject) { // inlined into GeneratorNext and GeneratorThrow. EmitGeneratorResumeResume is // called in any case, as it needs to reconstruct the stack frame and make space // for arguments and operands. 
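Aside, again outside the patch: the generator changes above swap a raw AllocateFixedArray call for a handle-returning factory allocation, but the suspend/resume idea is unchanged: on suspend the live operand stack is copied off the execution stack into a heap-owned array on the generator object, and on resume it is copied back. A conceptual standalone sketch; GeneratorState, Suspend and Resume are invented names.

#include <cassert>
#include <vector>

// Invented stand-in for the suspended generator object: a snapshot of the
// operand stack plus the offset to continue from.
struct GeneratorState {
  std::vector<int> operand_stack;  // set_operand_stack(...) analogue
  int continuation = -1;           // set_continuation(...) analogue
};

// Suspend: snapshot the frame's operands into the heap-owned state.
void Suspend(const std::vector<int>& frame_operands, int offset,
             GeneratorState* gen) {
  gen->operand_stack = frame_operands;  // SaveOperandStack analogue
  gen->continuation = offset;
}

// Resume: rebuild the frame's operand stack from the saved snapshot.
std::vector<int> Resume(GeneratorState* gen, int* offset) {
  *offset = gen->continuation;
  std::vector<int> frame_operands = gen->operand_stack;
  gen->operand_stack.clear();  // running again, drop the snapshot
  gen->continuation = -1;
  return frame_operands;
}

int main() {
  GeneratorState gen;
  Suspend({1, 2, 3}, 42, &gen);
  int offset = 0;
  std::vector<int> ops = Resume(&gen, &offset);
  assert(offset == 42 && ops.size() == 3);
}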
-RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ResumeJSGeneratorObject) { +RUNTIME_FUNCTION(RuntimeHidden_ResumeJSGeneratorObject) { SealHandleScope shs(isolate); ASSERT(args.length() == 3); CONVERT_ARG_CHECKED(JSGeneratorObject, generator_object, 0); @@ -3250,7 +3313,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ResumeJSGeneratorObject) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ThrowGeneratorStateError) { +RUNTIME_FUNCTION(RuntimeHidden_ThrowGeneratorStateError) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSGeneratorObject, generator, 0); @@ -3263,41 +3326,27 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ThrowGeneratorStateError) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ObjectFreeze) { +RUNTIME_FUNCTION(Runtime_ObjectFreeze) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); - Handle<Object> result = JSObject::Freeze(object); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result, JSObject::Freeze(object)); return *result; } -MUST_USE_RESULT static MaybeObject* CharFromCode(Isolate* isolate, - Object* char_code) { - if (char_code->IsNumber()) { - return isolate->heap()->LookupSingleCharacterStringFromCode( - NumberToUint32(char_code) & 0xffff); - } - return isolate->heap()->empty_string(); -} - - -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_StringCharCodeAt) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_StringCharCodeAt) { + HandleScope handle_scope(isolate); ASSERT(args.length() == 2); - CONVERT_ARG_CHECKED(String, subject, 0); + CONVERT_ARG_HANDLE_CHECKED(String, subject, 0); CONVERT_NUMBER_CHECKED(uint32_t, i, Uint32, args[1]); // Flatten the string. If someone wants to get a char at an index // in a cons string, it is likely that more indices will be // accessed. 
- Object* flat; - { MaybeObject* maybe_flat = subject->TryFlatten(); - if (!maybe_flat->ToObject(&flat)) return maybe_flat; - } - subject = String::cast(flat); + subject = String::Flatten(subject); if (i >= static_cast<uint32_t>(subject->length())) { return isolate->heap()->nan_value(); @@ -3307,10 +3356,15 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_StringCharCodeAt) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CharFromCode) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_CharFromCode) { + HandleScope handlescope(isolate); ASSERT(args.length() == 1); - return CharFromCode(isolate, args[0]); + if (args[0]->IsNumber()) { + CONVERT_NUMBER_CHECKED(uint32_t, code, Uint32, args[0]); + code &= 0xffff; + return *isolate->factory()->LookupSingleCharacterStringFromCode(code); + } + return isolate->heap()->empty_string(); } @@ -3470,15 +3524,20 @@ class ReplacementStringBuilder { } - Handle<String> ToString() { + MaybeHandle<String> ToString() { + Isolate* isolate = heap_->isolate(); if (array_builder_.length() == 0) { - return heap_->isolate()->factory()->empty_string(); + return isolate->factory()->empty_string(); } Handle<String> joined_string; if (is_ascii_) { - Handle<SeqOneByteString> seq = NewRawOneByteString(character_count_); - RETURN_IF_EMPTY_HANDLE_VALUE(heap_->isolate(), seq, Handle<String>()); + Handle<SeqOneByteString> seq; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, seq, + isolate->factory()->NewRawOneByteString(character_count_), + String); + DisallowHeapAllocation no_gc; uint8_t* char_buffer = seq->GetChars(); StringBuilderConcatHelper(*subject_, @@ -3488,8 +3547,12 @@ class ReplacementStringBuilder { joined_string = Handle<String>::cast(seq); } else { // Non-ASCII. - Handle<SeqTwoByteString> seq = NewRawTwoByteString(character_count_); - RETURN_IF_EMPTY_HANDLE_VALUE(heap_->isolate(), seq, Handle<String>()); + Handle<SeqTwoByteString> seq; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, seq, + isolate->factory()->NewRawTwoByteString(character_count_), + String); + DisallowHeapAllocation no_gc; uc16* char_buffer = seq->GetChars(); StringBuilderConcatHelper(*subject_, @@ -3512,16 +3575,6 @@ class ReplacementStringBuilder { } private: - Handle<SeqOneByteString> NewRawOneByteString(int length) { - return heap_->isolate()->factory()->NewRawOneByteString(length); - } - - - Handle<SeqTwoByteString> NewRawTwoByteString(int length) { - return heap_->isolate()->factory()->NewRawTwoByteString(length); - } - - void AddElement(Object* element) { ASSERT(element->IsSmi() || element->IsString()); ASSERT(array_builder_.capacity() > array_builder_.length()); @@ -3959,7 +4012,7 @@ void FindStringIndicesDispatch(Isolate* isolate, template<typename ResultSeqString> -MUST_USE_RESULT static MaybeObject* StringReplaceGlobalAtomRegExpWithString( +MUST_USE_RESULT static Object* StringReplaceGlobalAtomRegExpWithString( Isolate* isolate, Handle<String> subject, Handle<JSRegExp> pattern_regexp, @@ -4000,14 +4053,15 @@ MUST_USE_RESULT static MaybeObject* StringReplaceGlobalAtomRegExpWithString( int subject_pos = 0; int result_pos = 0; - Handle<String> result_seq; + MaybeHandle<SeqString> maybe_res; if (ResultSeqString::kHasAsciiEncoding) { - result_seq = isolate->factory()->NewRawOneByteString(result_len); + maybe_res = isolate->factory()->NewRawOneByteString(result_len); } else { - result_seq = isolate->factory()->NewRawTwoByteString(result_len); + maybe_res = isolate->factory()->NewRawTwoByteString(result_len); } - RETURN_IF_EMPTY_HANDLE(isolate, result_seq); - Handle<ResultSeqString> result = 
Handle<ResultSeqString>::cast(result_seq); + Handle<SeqString> untyped_res; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, untyped_res, maybe_res); + Handle<ResultSeqString> result = Handle<ResultSeqString>::cast(untyped_res); for (int i = 0; i < matches; i++) { // Copy non-matched subject content. @@ -4046,7 +4100,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceGlobalAtomRegExpWithString( } -MUST_USE_RESULT static MaybeObject* StringReplaceGlobalRegExpWithString( +MUST_USE_RESULT static Object* StringReplaceGlobalRegExpWithString( Isolate* isolate, Handle<String> subject, Handle<JSRegExp> regexp, @@ -4078,11 +4132,11 @@ MUST_USE_RESULT static MaybeObject* StringReplaceGlobalRegExpWithString( } RegExpImpl::GlobalCache global_cache(regexp, subject, true, isolate); - if (global_cache.HasException()) return Failure::Exception(); + if (global_cache.HasException()) return isolate->heap()->exception(); int32_t* current_match = global_cache.FetchNext(); if (current_match == NULL) { - if (global_cache.HasException()) return Failure::Exception(); + if (global_cache.HasException()) return isolate->heap()->exception(); return *subject; } @@ -4124,7 +4178,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceGlobalRegExpWithString( current_match = global_cache.FetchNext(); } while (current_match != NULL); - if (global_cache.HasException()) return Failure::Exception(); + if (global_cache.HasException()) return isolate->heap()->exception(); if (prev < subject_length) { builder.EnsureCapacity(2); @@ -4136,14 +4190,14 @@ MUST_USE_RESULT static MaybeObject* StringReplaceGlobalRegExpWithString( capture_count, global_cache.LastSuccessfulMatch()); - Handle<String> result = builder.ToString(); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<String> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result, builder.ToString()); return *result; } template <typename ResultSeqString> -MUST_USE_RESULT static MaybeObject* StringReplaceGlobalRegExpWithEmptyString( +MUST_USE_RESULT static Object* StringReplaceGlobalRegExpWithEmptyString( Isolate* isolate, Handle<String> subject, Handle<JSRegExp> regexp, @@ -4163,11 +4217,11 @@ MUST_USE_RESULT static MaybeObject* StringReplaceGlobalRegExpWithEmptyString( } RegExpImpl::GlobalCache global_cache(regexp, subject, true, isolate); - if (global_cache.HasException()) return Failure::Exception(); + if (global_cache.HasException()) return isolate->heap()->exception(); int32_t* current_match = global_cache.FetchNext(); if (current_match == NULL) { - if (global_cache.HasException()) return Failure::Exception(); + if (global_cache.HasException()) return isolate->heap()->exception(); return *subject; } @@ -4182,12 +4236,11 @@ MUST_USE_RESULT static MaybeObject* StringReplaceGlobalRegExpWithEmptyString( Handle<ResultSeqString> answer; if (ResultSeqString::kHasAsciiEncoding) { answer = Handle<ResultSeqString>::cast( - isolate->factory()->NewRawOneByteString(new_length)); + isolate->factory()->NewRawOneByteString(new_length).ToHandleChecked()); } else { answer = Handle<ResultSeqString>::cast( - isolate->factory()->NewRawTwoByteString(new_length)); + isolate->factory()->NewRawTwoByteString(new_length).ToHandleChecked()); } - ASSERT(!answer.is_null()); int prev = 0; int position = 0; @@ -4205,7 +4258,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceGlobalRegExpWithEmptyString( current_match = global_cache.FetchNext(); } while (current_match != NULL); - if (global_cache.HasException()) return Failure::Exception(); + if (global_cache.HasException()) return 
isolate->heap()->exception(); RegExpImpl::SetLastMatchInfo(last_match_info, subject, @@ -4231,13 +4284,18 @@ MUST_USE_RESULT static MaybeObject* StringReplaceGlobalRegExpWithEmptyString( Address end_of_string = answer->address() + string_size; Heap* heap = isolate->heap(); + + // The trimming is performed on a newly allocated object, which is on a + // fresly allocated page or on an already swept page. Hence, the sweeper + // thread can not get confused with the filler creation. No synchronization + // needed. heap->CreateFillerObjectAt(end_of_string, delta); heap->AdjustLiveBytes(answer->address(), -delta, Heap::FROM_MUTATOR); return *answer; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceGlobalRegExpWithString) { +RUNTIME_FUNCTION(Runtime_StringReplaceGlobalRegExpWithString) { HandleScope scope(isolate); ASSERT(args.length() == 4); @@ -4246,9 +4304,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceGlobalRegExpWithString) { CONVERT_ARG_HANDLE_CHECKED(JSRegExp, regexp, 1); CONVERT_ARG_HANDLE_CHECKED(JSArray, last_match_info, 3); - ASSERT(regexp->GetFlags().is_global()); + RUNTIME_ASSERT(regexp->GetFlags().is_global()); - if (!subject->IsFlat()) subject = FlattenGetString(subject); + subject = String::Flatten(subject); if (replacement->length() == 0) { if (subject->HasOnlyOneByteChars()) { @@ -4260,42 +4318,41 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceGlobalRegExpWithString) { } } - if (!replacement->IsFlat()) replacement = FlattenGetString(replacement); + replacement = String::Flatten(replacement); return StringReplaceGlobalRegExpWithString( isolate, subject, regexp, replacement, last_match_info); } -Handle<String> StringReplaceOneCharWithString(Isolate* isolate, - Handle<String> subject, - Handle<String> search, - Handle<String> replace, - bool* found, - int recursion_limit) { - if (recursion_limit == 0) return Handle<String>::null(); +// This may return an empty MaybeHandle if an exception is thrown or +// we abort due to reaching the recursion limit. 
+MaybeHandle<String> StringReplaceOneCharWithString(Isolate* isolate, + Handle<String> subject, + Handle<String> search, + Handle<String> replace, + bool* found, + int recursion_limit) { + if (recursion_limit == 0) return MaybeHandle<String>(); + recursion_limit--; if (subject->IsConsString()) { ConsString* cons = ConsString::cast(*subject); Handle<String> first = Handle<String>(cons->first()); Handle<String> second = Handle<String>(cons->second()); - Handle<String> new_first = - StringReplaceOneCharWithString(isolate, - first, - search, - replace, - found, - recursion_limit - 1); - if (new_first.is_null()) return new_first; + Handle<String> new_first; + if (!StringReplaceOneCharWithString( + isolate, first, search, replace, found, recursion_limit) + .ToHandle(&new_first)) { + return MaybeHandle<String>(); + } if (*found) return isolate->factory()->NewConsString(new_first, second); - Handle<String> new_second = - StringReplaceOneCharWithString(isolate, - second, - search, - replace, - found, - recursion_limit - 1); - if (new_second.is_null()) return new_second; + Handle<String> new_second; + if (!StringReplaceOneCharWithString( + isolate, second, search, replace, found, recursion_limit) + .ToHandle(&new_second)) { + return MaybeHandle<String>(); + } if (*found) return isolate->factory()->NewConsString(first, new_second); return subject; @@ -4304,8 +4361,11 @@ Handle<String> StringReplaceOneCharWithString(Isolate* isolate, if (index == -1) return subject; *found = true; Handle<String> first = isolate->factory()->NewSubString(subject, 0, index); - Handle<String> cons1 = isolate->factory()->NewConsString(first, replace); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, cons1, Handle<String>()); + Handle<String> cons1; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, cons1, + isolate->factory()->NewConsString(first, replace), + String); Handle<String> second = isolate->factory()->NewSubString(subject, index + 1, subject->length()); return isolate->factory()->NewConsString(cons1, second); @@ -4313,7 +4373,7 @@ Handle<String> StringReplaceOneCharWithString(Isolate* isolate, } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceOneCharWithString) { +RUNTIME_FUNCTION(Runtime_StringReplaceOneCharWithString) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(String, subject, 0); @@ -4324,20 +4384,20 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceOneCharWithString) { // retry with a flattened subject string. 
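Illustrative aside, not V8 code: StringReplaceOneCharWithString above recurses over a cons-string tree, returns an empty MaybeHandle once its recursion budget runs out, and lets the caller flatten the subject and retry. The same control flow on a toy rope type, with std::optional standing in for the empty-versus-present distinction; Rope, ReplaceOneChar and the budget value are illustrative only.

#include <cassert>
#include <memory>
#include <optional>
#include <string>

// Toy rope: either a leaf string or the concatenation of two child ropes,
// mirroring flat strings vs. ConsString.
struct Rope {
  std::string leaf;  // used when both children are null
  std::shared_ptr<Rope> first, second;
};

using RopePtr = std::shared_ptr<Rope>;

RopePtr Leaf(std::string s) {
  auto r = std::make_shared<Rope>();
  r->leaf = std::move(s);
  return r;
}

RopePtr Cons(RopePtr a, RopePtr b) {
  auto r = std::make_shared<Rope>();
  r->first = std::move(a);
  r->second = std::move(b);
  return r;
}

// Replace the first occurrence of `search` with `replace`. An empty optional
// means "recursion budget exhausted, caller should flatten and retry";
// *found tells the caller whether a replacement actually happened.
std::optional<RopePtr> ReplaceOneChar(const RopePtr& subject, char search,
                                      const std::string& replace, bool* found,
                                      int recursion_limit) {
  if (recursion_limit == 0) return std::nullopt;
  recursion_limit--;
  if (subject->first) {  // cons node: try the left side, then the right
    auto new_first = ReplaceOneChar(subject->first, search, replace, found,
                                    recursion_limit);
    if (!new_first) return std::nullopt;
    if (*found) return Cons(*new_first, subject->second);
    auto new_second = ReplaceOneChar(subject->second, search, replace, found,
                                     recursion_limit);
    if (!new_second) return std::nullopt;
    if (*found) return Cons(subject->first, *new_second);
    return subject;  // nothing to replace below this node
  }
  size_t index = subject->leaf.find(search);
  if (index == std::string::npos) return subject;
  *found = true;
  return Leaf(subject->leaf.substr(0, index) + replace +
              subject->leaf.substr(index + 1));
}

int main() {
  RopePtr rope = Cons(Leaf("hello "), Leaf("world"));
  bool found = false;
  auto result = ReplaceOneChar(rope, 'w', "W", &found, 16);
  assert(result && found);
}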
const int kRecursionLimit = 0x1000; bool found = false; - Handle<String> result = StringReplaceOneCharWithString(isolate, - subject, - search, - replace, - &found, - kRecursionLimit); - if (!result.is_null()) return *result; - if (isolate->has_pending_exception()) return Failure::Exception(); - return *StringReplaceOneCharWithString(isolate, - FlattenGetString(subject), - search, - replace, - &found, - kRecursionLimit); + Handle<String> result; + if (StringReplaceOneCharWithString( + isolate, subject, search, replace, &found, kRecursionLimit) + .ToHandle(&result)) { + return *result; + } + if (isolate->has_pending_exception()) return isolate->heap()->exception(); + + subject = String::Flatten(subject); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + StringReplaceOneCharWithString( + isolate, subject, search, replace, &found, kRecursionLimit)); + return *result; } @@ -4357,8 +4417,8 @@ int Runtime::StringMatch(Isolate* isolate, int subject_length = sub->length(); if (start_index + pattern_length > subject_length) return -1; - if (!sub->IsFlat()) FlattenString(sub); - if (!pat->IsFlat()) FlattenString(pat); + sub = String::Flatten(sub); + pat = String::Flatten(pat); DisallowHeapAllocation no_gc; // ensure vectors stay valid // Extract flattened substrings of cons strings before determining asciiness. @@ -4393,20 +4453,19 @@ int Runtime::StringMatch(Isolate* isolate, } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringIndexOf) { +RUNTIME_FUNCTION(Runtime_StringIndexOf) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(String, sub, 0); CONVERT_ARG_HANDLE_CHECKED(String, pat, 1); + CONVERT_ARG_HANDLE_CHECKED(Object, index, 2); - Object* index = args[2]; uint32_t start_index; if (!index->ToArrayIndex(&start_index)) return Smi::FromInt(-1); RUNTIME_ASSERT(start_index <= static_cast<uint32_t>(sub->length())); - int position = - Runtime::StringMatch(isolate, sub, pat, start_index); + int position = Runtime::StringMatch(isolate, sub, pat, start_index); return Smi::FromInt(position); } @@ -4446,14 +4505,14 @@ static int StringMatchBackwards(Vector<const schar> subject, } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringLastIndexOf) { +RUNTIME_FUNCTION(Runtime_StringLastIndexOf) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(String, sub, 0); CONVERT_ARG_HANDLE_CHECKED(String, pat, 1); + CONVERT_ARG_HANDLE_CHECKED(Object, index, 2); - Object* index = args[2]; uint32_t start_index; if (!index->ToArrayIndex(&start_index)) return Smi::FromInt(-1); @@ -4468,8 +4527,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringLastIndexOf) { return Smi::FromInt(start_index); } - if (!sub->IsFlat()) FlattenString(sub); - if (!pat->IsFlat()) FlattenString(pat); + sub = String::Flatten(sub); + pat = String::Flatten(pat); int position = -1; DisallowHeapAllocation no_gc; // ensure vectors stay valid @@ -4505,14 +4564,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringLastIndexOf) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringLocaleCompare) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_StringLocaleCompare) { + HandleScope handle_scope(isolate); ASSERT(args.length() == 2); - CONVERT_ARG_CHECKED(String, str1, 0); - CONVERT_ARG_CHECKED(String, str2, 1); + CONVERT_ARG_HANDLE_CHECKED(String, str1, 0); + CONVERT_ARG_HANDLE_CHECKED(String, str2, 1); - if (str1 == str2) return Smi::FromInt(0); // Equal. + if (str1.is_identical_to(str2)) return Smi::FromInt(0); // Equal. 
int str1_length = str1->length(); int str2_length = str2->length(); @@ -4532,28 +4591,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringLocaleCompare) { int d = str1->Get(0) - str2->Get(0); if (d != 0) return Smi::FromInt(d); - str1->TryFlatten(); - str2->TryFlatten(); + str1 = String::Flatten(str1); + str2 = String::Flatten(str2); - ConsStringIteratorOp* op1 = - isolate->runtime_state()->string_locale_compare_it1(); - ConsStringIteratorOp* op2 = - isolate->runtime_state()->string_locale_compare_it2(); - // TODO(dcarney) Can do array compares here more efficiently. - StringCharacterStream stream1(str1, op1); - StringCharacterStream stream2(str2, op2); + DisallowHeapAllocation no_gc; + String::FlatContent flat1 = str1->GetFlatContent(); + String::FlatContent flat2 = str2->GetFlatContent(); for (int i = 0; i < end; i++) { - uint16_t char1 = stream1.GetNext(); - uint16_t char2 = stream2.GetNext(); - if (char1 != char2) return Smi::FromInt(char1 - char2); + if (flat1.Get(i) != flat2.Get(i)) { + return Smi::FromInt(flat1.Get(i) - flat2.Get(i)); + } } return Smi::FromInt(str1_length - str2_length); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_SubString) { +RUNTIME_FUNCTION(RuntimeHidden_SubString) { HandleScope scope(isolate); ASSERT(args.length() == 3); @@ -4581,16 +4636,18 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_SubString) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringMatch) { +RUNTIME_FUNCTION(Runtime_StringMatch) { HandleScope handles(isolate); - ASSERT_EQ(3, args.length()); + ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(String, subject, 0); CONVERT_ARG_HANDLE_CHECKED(JSRegExp, regexp, 1); CONVERT_ARG_HANDLE_CHECKED(JSArray, regexp_info, 2); + RUNTIME_ASSERT(regexp_info->HasFastObjectElements()); + RegExpImpl::GlobalCache global_cache(regexp, subject, true, isolate); - if (global_cache.HasException()) return Failure::Exception(); + if (global_cache.HasException()) return isolate->heap()->exception(); int capture_count = regexp->CaptureCount(); @@ -4604,7 +4661,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringMatch) { offsets.Add(match[1], zone_scope.zone()); // end } - if (global_cache.HasException()) return Failure::Exception(); + if (global_cache.HasException()) return isolate->heap()->exception(); if (offsets.length() == 0) { // Not a single match. @@ -4638,7 +4695,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringMatch) { // Only called from Runtime_RegExpExecMultiple so it doesn't need to maintain // separate last match info. See comment on that function. template<bool has_capture> -static MaybeObject* SearchRegExpMultiple( +static Object* SearchRegExpMultiple( Isolate* isolate, Handle<String> subject, Handle<JSRegExp> regexp, @@ -4676,7 +4733,7 @@ static MaybeObject* SearchRegExpMultiple( } RegExpImpl::GlobalCache global_cache(regexp, subject, true, isolate); - if (global_cache.HasException()) return Failure::Exception(); + if (global_cache.HasException()) return isolate->heap()->exception(); Handle<FixedArray> result_elements; if (result_array->HasFastObjectElements()) { @@ -4752,7 +4809,7 @@ static MaybeObject* SearchRegExpMultiple( } } - if (global_cache.HasException()) return Failure::Exception(); + if (global_cache.HasException()) return isolate->heap()->exception(); if (match_start >= 0) { // Finished matching, with at least one match. @@ -4773,10 +4830,10 @@ static MaybeObject* SearchRegExpMultiple( fixed_array->set(fixed_array->length() - 1, Smi::FromInt(builder.length())); // Cache the result and turn the FixedArray into a COW array. 
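A further aside, separate from the diff: Runtime_StringLocaleCompare above now flattens both strings once and compares their flat contents directly instead of streaming through the isolate's ConsStringIteratorOp state. The comparison itself is code unit by code unit with a length tiebreak, as in this standalone sketch; LocaleCompareLike is an invented name, and it mirrors only the fallback path shown here, not locale-aware collation.

#include <algorithm>
#include <cassert>
#include <string>

// Code-unit-by-code-unit comparison with a length tiebreak, mirroring the
// flat-content loop in Runtime_StringLocaleCompare above.
int LocaleCompareLike(const std::u16string& a, const std::u16string& b) {
  if (&a == &b) return 0;  // identical objects compare equal
  int end = static_cast<int>(std::min(a.length(), b.length()));
  for (int i = 0; i < end; i++) {
    if (a[i] != b[i]) return static_cast<int>(a[i]) - static_cast<int>(b[i]);
  }
  // No differing code unit in the common prefix: shorter string sorts first.
  return static_cast<int>(a.length()) - static_cast<int>(b.length());
}

int main() {
  assert(LocaleCompareLike(u"abc", u"abd") < 0);
  assert(LocaleCompareLike(u"abc", u"ab") > 0);
  assert(LocaleCompareLike(u"abc", u"abc") == 0);
}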
- RegExpResultsCache::Enter(isolate->heap(), - *subject, - regexp->data(), - *fixed_array, + RegExpResultsCache::Enter(isolate, + subject, + handle(regexp->data(), isolate), + fixed_array, RegExpResultsCache::REGEXP_MULTIPLE_INDICES); } return *builder.ToJSArray(result_array); @@ -4789,16 +4846,16 @@ static MaybeObject* SearchRegExpMultiple( // This is only called for StringReplaceGlobalRegExpWithFunction. This sets // lastMatchInfoOverride to maintain the last match info, so we don't need to // set any other last match array info. -RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExecMultiple) { +RUNTIME_FUNCTION(Runtime_RegExpExecMultiple) { HandleScope handles(isolate); ASSERT(args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(String, subject, 1); - if (!subject->IsFlat()) FlattenString(subject); CONVERT_ARG_HANDLE_CHECKED(JSRegExp, regexp, 0); CONVERT_ARG_HANDLE_CHECKED(JSArray, last_match_info, 2); CONVERT_ARG_HANDLE_CHECKED(JSArray, result_array, 3); + subject = String::Flatten(subject); ASSERT(regexp->GetFlags().is_global()); if (regexp->CaptureCount() == 0) { @@ -4811,8 +4868,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExecMultiple) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToRadixString) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberToRadixString) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_SMI_ARG_CHECKED(radix, 1); RUNTIME_ASSERT(2 <= radix && radix <= 36); @@ -4823,7 +4880,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToRadixString) { if (value >= 0 && value < radix) { // Character array used for conversion. static const char kCharTable[] = "0123456789abcdefghijklmnopqrstuvwxyz"; - return isolate->heap()-> + return *isolate->factory()-> LookupSingleCharacterStringFromCode(kCharTable[value]); } } @@ -4831,40 +4888,39 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToRadixString) { // Slow case. 
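Aside, not part of the patch: the Runtime_NumberToRadixString fast path above turns an integer value in [0, radix) straight into a one-character string by indexing the digit table. A standalone sketch of just that fast path; SingleDigitToString is an invented helper and the multi-digit slow path is omitted.

#include <cassert>
#include <optional>
#include <string>

// Fast path only: a non-negative integer smaller than the radix is a single
// digit and maps directly into the digit table used in the hunk above.
std::optional<std::string> SingleDigitToString(int value, int radix) {
  if (radix < 2 || radix > 36) return std::nullopt;      // RUNTIME_ASSERT range
  if (value < 0 || value >= radix) return std::nullopt;  // not a single digit
  static const char kCharTable[] = "0123456789abcdefghijklmnopqrstuvwxyz";
  return std::string(1, kCharTable[value]);
}

int main() {
  assert(*SingleDigitToString(10, 16) == "a");
  assert(*SingleDigitToString(1, 2) == "1");
  assert(!SingleDigitToString(16, 16).has_value());  // needs the slow path
}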
CONVERT_DOUBLE_ARG_CHECKED(value, 0); if (std::isnan(value)) { - return *isolate->factory()->nan_string(); + return isolate->heap()->nan_string(); } if (std::isinf(value)) { if (value < 0) { - return *isolate->factory()->minus_infinity_string(); + return isolate->heap()->minus_infinity_string(); } - return *isolate->factory()->infinity_string(); + return isolate->heap()->infinity_string(); } char* str = DoubleToRadixCString(value, radix); - MaybeObject* result = - isolate->heap()->AllocateStringFromOneByte(CStrVector(str)); + Handle<String> result = isolate->factory()->NewStringFromAsciiChecked(str); DeleteArray(str); - return result; + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToFixed) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberToFixed) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_DOUBLE_ARG_CHECKED(value, 0); CONVERT_DOUBLE_ARG_CHECKED(f_number, 1); int f = FastD2IChecked(f_number); - RUNTIME_ASSERT(f >= 0); + // See DoubleToFixedCString for these constants: + RUNTIME_ASSERT(f >= 0 && f <= 20); char* str = DoubleToFixedCString(value, f); - MaybeObject* res = - isolate->heap()->AllocateStringFromOneByte(CStrVector(str)); + Handle<String> result = isolate->factory()->NewStringFromAsciiChecked(str); DeleteArray(str); - return res; + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToExponential) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberToExponential) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_DOUBLE_ARG_CHECKED(value, 0); @@ -4872,15 +4928,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToExponential) { int f = FastD2IChecked(f_number); RUNTIME_ASSERT(f >= -1 && f <= 20); char* str = DoubleToExponentialCString(value, f); - MaybeObject* res = - isolate->heap()->AllocateStringFromOneByte(CStrVector(str)); + Handle<String> result = isolate->factory()->NewStringFromAsciiChecked(str); DeleteArray(str); - return res; + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToPrecision) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberToPrecision) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_DOUBLE_ARG_CHECKED(value, 0); @@ -4888,23 +4943,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToPrecision) { int f = FastD2IChecked(f_number); RUNTIME_ASSERT(f >= 1 && f <= 21); char* str = DoubleToPrecisionCString(value, f); - MaybeObject* res = - isolate->heap()->AllocateStringFromOneByte(CStrVector(str)); + Handle<String> result = isolate->factory()->NewStringFromAsciiChecked(str); DeleteArray(str); - return res; + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsValidSmi) { - HandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_IsValidSmi) { + SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_NUMBER_CHECKED(int32_t, number, Int32, args[0]); - if (Smi::IsValid(number)) { - return isolate->heap()->true_value(); - } else { - return isolate->heap()->false_value(); - } + return isolate->heap()->ToBoolean(Smi::IsValid(number)); } @@ -4912,18 +4962,17 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsValidSmi) { // string->Get(index). 
static Handle<Object> GetCharAt(Handle<String> string, uint32_t index) { if (index < static_cast<uint32_t>(string->length())) { - string->TryFlatten(); - return LookupSingleCharacterStringFromCode( - string->GetIsolate(), - string->Get(index)); + Factory* factory = string->GetIsolate()->factory(); + return factory->LookupSingleCharacterStringFromCode( + String::Flatten(string)->Get(index)); } return Execution::CharAt(string, index); } -Handle<Object> Runtime::GetElementOrCharAt(Isolate* isolate, - Handle<Object> object, - uint32_t index) { +MaybeHandle<Object> Runtime::GetElementOrCharAt(Isolate* isolate, + Handle<Object> object, + uint32_t index) { // Handle [] indexing on Strings if (object->IsString()) { Handle<Object> result = GetCharAt(Handle<String>::cast(object), index); @@ -4948,98 +4997,88 @@ Handle<Object> Runtime::GetElementOrCharAt(Isolate* isolate, } -static Handle<Name> ToName(Isolate* isolate, Handle<Object> key) { +MUST_USE_RESULT +static MaybeHandle<Name> ToName(Isolate* isolate, Handle<Object> key) { if (key->IsName()) { return Handle<Name>::cast(key); } else { - bool has_pending_exception = false; - Handle<Object> converted = - Execution::ToString(isolate, key, &has_pending_exception); - if (has_pending_exception) return Handle<Name>(); + Handle<Object> converted; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, converted, Execution::ToString(isolate, key), Name); return Handle<Name>::cast(converted); } } -MaybeObject* Runtime::HasObjectProperty(Isolate* isolate, - Handle<JSReceiver> object, - Handle<Object> key) { - HandleScope scope(isolate); - +MaybeHandle<Object> Runtime::HasObjectProperty(Isolate* isolate, + Handle<JSReceiver> object, + Handle<Object> key) { // Check if the given key is an array index. uint32_t index; if (key->ToArrayIndex(&index)) { - return isolate->heap()->ToBoolean(JSReceiver::HasElement(object, index)); + return isolate->factory()->ToBoolean(JSReceiver::HasElement(object, index)); } // Convert the key to a name - possibly by calling back into JavaScript. - Handle<Name> name = ToName(isolate, key); - RETURN_IF_EMPTY_HANDLE(isolate, name); - - return isolate->heap()->ToBoolean(JSReceiver::HasProperty(object, name)); -} + Handle<Name> name; + ASSIGN_RETURN_ON_EXCEPTION(isolate, name, ToName(isolate, key), Object); -MaybeObject* Runtime::GetObjectPropertyOrFail( - Isolate* isolate, - Handle<Object> object, - Handle<Object> key) { - CALL_HEAP_FUNCTION_PASS_EXCEPTION(isolate, - GetObjectProperty(isolate, object, key)); + return isolate->factory()->ToBoolean(JSReceiver::HasProperty(object, name)); } -MaybeObject* Runtime::GetObjectProperty(Isolate* isolate, - Handle<Object> object, - Handle<Object> key) { - HandleScope scope(isolate); +MaybeHandle<Object> Runtime::GetObjectProperty(Isolate* isolate, + Handle<Object> object, + Handle<Object> key) { if (object->IsUndefined() || object->IsNull()) { Handle<Object> args[2] = { key, object }; - Handle<Object> error = + return isolate->Throw<Object>( isolate->factory()->NewTypeError("non_object_property_load", - HandleVector(args, 2)); - return isolate->Throw(*error); + HandleVector(args, 2))); } // Check if the given key is an array index. uint32_t index; if (key->ToArrayIndex(&index)) { - Handle<Object> result = GetElementOrCharAt(isolate, object, index); - RETURN_IF_EMPTY_HANDLE(isolate, result); - return *result; + return GetElementOrCharAt(isolate, object, index); } // Convert the key to a name - possibly by calling back into JavaScript. 
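// ASSIGN_RETURN_ON_EXCEPTION, used in the helpers below, replaces the old
// has_pending_exception out-parameter of Execution::ToString and friends: it
// evaluates a MaybeHandle-returning expression, assigns the handle on success,
// and on failure returns an empty MaybeHandle of the type named in its last
// argument. Roughly, as a sketch of the intent rather than the literal macro:
//
//   Handle<Object> converted;
//   if (!Execution::ToString(isolate, key).ToHandle(&converted)) {
//     return MaybeHandle<Name>();  // exception is already pending on isolate
//   }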
- Handle<Name> name = ToName(isolate, key); - RETURN_IF_EMPTY_HANDLE(isolate, name); + Handle<Name> name; + ASSIGN_RETURN_ON_EXCEPTION(isolate, name, ToName(isolate, key), Object); // Check if the name is trivially convertible to an index and get // the element if so. if (name->AsArrayIndex(&index)) { - Handle<Object> result = GetElementOrCharAt(isolate, object, index); - RETURN_IF_EMPTY_HANDLE(isolate, result); - return *result; + return GetElementOrCharAt(isolate, object, index); } else { - return object->GetProperty(*name); + return Object::GetProperty(object, name); } } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetProperty) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_GetProperty) { + HandleScope scope(isolate); ASSERT(args.length() == 2); - Handle<Object> object = args.at<Object>(0); - Handle<Object> key = args.at<Object>(1); - - return Runtime::GetObjectProperty(isolate, object, key); + CONVERT_ARG_HANDLE_CHECKED(Object, object, 0); + CONVERT_ARG_HANDLE_CHECKED(Object, key, 1); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Runtime::GetObjectProperty(isolate, object, key)); + return *result; } // KeyedGetProperty is called from KeyedLoadIC::GenerateGeneric. -RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_KeyedGetProperty) { + HandleScope scope(isolate); ASSERT(args.length() == 2); + CONVERT_ARG_HANDLE_CHECKED(Object, receiver_obj, 0); + CONVERT_ARG_HANDLE_CHECKED(Object, key_obj, 1); + // Fast cases for getting named properties of the receiver JSObject // itself. // @@ -5051,15 +5090,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) { // // Additionally, we need to make sure that we do not cache results // for objects that require access checks. - if (args[0]->IsJSObject()) { - if (!args[0]->IsJSGlobalProxy() && - !args[0]->IsAccessCheckNeeded() && - args[1]->IsName()) { - JSObject* receiver = JSObject::cast(args[0]); - Name* key = Name::cast(args[1]); + if (receiver_obj->IsJSObject()) { + if (!receiver_obj->IsJSGlobalProxy() && + !receiver_obj->IsAccessCheckNeeded() && + key_obj->IsName()) { + DisallowHeapAllocation no_allocation; + Handle<JSObject> receiver = Handle<JSObject>::cast(receiver_obj); + Handle<Name> key = Handle<Name>::cast(key_obj); if (receiver->HasFastProperties()) { // Attempt to use lookup cache. - Map* receiver_map = receiver->map(); + Handle<Map> receiver_map(receiver->map(), isolate); KeyedLookupCache* keyed_lookup_cache = isolate->keyed_lookup_cache(); int offset = keyed_lookup_cache->Lookup(receiver_map, key); if (offset != -1) { @@ -5080,7 +5120,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) { if (!result.representation().IsDouble()) { keyed_lookup_cache->Update(receiver_map, key, offset); } - return receiver->FastPropertyAt(result.representation(), offset); + AllowHeapAllocation allow_allocation; + return *JSObject::FastPropertyAt( + receiver, result.representation(), offset); } } else { // Attempt dictionary lookup. @@ -5095,48 +5137,46 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) { // If value is the hole do the general lookup. } } - } else if (FLAG_smi_only_arrays && args.at<Object>(1)->IsSmi()) { + } else if (FLAG_smi_only_arrays && key_obj->IsSmi()) { // JSObject without a name key. If the key is a Smi, check for a // definite out-of-bounds access to elements, which is a strong indicator // that subsequent accesses will also call the runtime. 
Proactively // transition elements to FAST_*_ELEMENTS to avoid excessive boxing of // doubles for those future calls in the case that the elements would // become FAST_DOUBLE_ELEMENTS. - Handle<JSObject> js_object(args.at<JSObject>(0)); + Handle<JSObject> js_object = Handle<JSObject>::cast(receiver_obj); ElementsKind elements_kind = js_object->GetElementsKind(); if (IsFastDoubleElementsKind(elements_kind)) { - FixedArrayBase* elements = js_object->elements(); - if (args.at<Smi>(1)->value() >= elements->length()) { + Handle<Smi> key = Handle<Smi>::cast(key_obj); + if (key->value() >= js_object->elements()->length()) { if (IsFastHoleyElementsKind(elements_kind)) { elements_kind = FAST_HOLEY_ELEMENTS; } else { elements_kind = FAST_ELEMENTS; } - MaybeObject* maybe_object = TransitionElements(js_object, - elements_kind, - isolate); - if (maybe_object->IsFailure()) return maybe_object; + RETURN_FAILURE_ON_EXCEPTION( + isolate, TransitionElements(js_object, elements_kind, isolate)); } } else { ASSERT(IsFastSmiOrObjectElementsKind(elements_kind) || !IsFastElementsKind(elements_kind)); } } - } else if (args[0]->IsString() && args[1]->IsSmi()) { + } else if (receiver_obj->IsString() && key_obj->IsSmi()) { // Fast case for string indexing using [] with a smi index. - HandleScope scope(isolate); - Handle<String> str = args.at<String>(0); + Handle<String> str = Handle<String>::cast(receiver_obj); int index = args.smi_at(1); if (index >= 0 && index < str->length()) { - Handle<Object> result = GetCharAt(str, index); - return *result; + return *GetCharAt(str, index); } } // Fall back to GetObjectProperty. - return Runtime::GetObjectProperty(isolate, - args.at<Object>(0), - args.at<Object>(1)); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Runtime::GetObjectProperty(isolate, receiver_obj, key_obj)); + return *result; } @@ -5151,7 +5191,7 @@ static bool IsValidAccessor(Handle<Object> obj) { // Steps 9c & 12 - replace an existing data property with an accessor property. // Step 12 - update an existing accessor property with an accessor or generic // descriptor. -RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineAccessorProperty) { +RUNTIME_FUNCTION(Runtime_DefineOrRedefineAccessorProperty) { HandleScope scope(isolate); ASSERT(args.length() == 5); CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0); @@ -5166,8 +5206,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineAccessorProperty) { PropertyAttributes attr = static_cast<PropertyAttributes>(unchecked); bool fast = obj->HasFastProperties(); + // DefineAccessor checks access rights. JSObject::DefineAccessor(obj, name, getter, setter, attr); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); if (fast) JSObject::TransformToFastProperties(obj, 0); return isolate->heap()->undefined_value(); } @@ -5179,7 +5220,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineAccessorProperty) { // Steps 9b & 12 - replace an existing accessor property with a data property. // Step 12 - update an existing data property with a data or generic // descriptor. 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) { +RUNTIME_FUNCTION(Runtime_DefineOrRedefineDataProperty) { HandleScope scope(isolate); ASSERT(args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(JSObject, js_object, 0); @@ -5189,31 +5230,34 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) { RUNTIME_ASSERT((unchecked & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0); PropertyAttributes attr = static_cast<PropertyAttributes>(unchecked); + // Check access rights if needed. + if (js_object->IsAccessCheckNeeded() && + !isolate->MayNamedAccess(js_object, name, v8::ACCESS_SET)) { + return isolate->heap()->undefined_value(); + } + LookupResult lookup(isolate); - js_object->LocalLookupRealNamedProperty(*name, &lookup); + js_object->LocalLookupRealNamedProperty(name, &lookup); // Special case for callback properties. if (lookup.IsPropertyCallbacks()) { Handle<Object> callback(lookup.GetCallbackObject(), isolate); - // To be compatible with Safari we do not change the value on API objects - // in Object.defineProperty(). Firefox disagrees here, and actually changes - // the value. - if (callback->IsAccessorInfo()) { - return isolate->heap()->undefined_value(); - } - // Avoid redefining foreign callback as data property, just use the stored + // Avoid redefining callback as data property, just use the stored // setter to update the value instead. // TODO(mstarzinger): So far this only works if property attributes don't // change, this should be fixed once we cleanup the underlying code. - if (callback->IsForeign() && lookup.GetAttributes() == attr) { - Handle<Object> result_object = + ASSERT(!callback->IsForeign()); + if (callback->IsAccessorInfo() && + lookup.GetAttributes() == attr) { + Handle<Object> result_object; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result_object, JSObject::SetPropertyWithCallback(js_object, callback, name, obj_value, handle(lookup.holder()), - STRICT); - RETURN_IF_EMPTY_HANDLE(isolate, result_object); + STRICT)); return *result_object; } } @@ -5235,57 +5279,40 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) { JSObject::NormalizeProperties(js_object, CLEAR_INOBJECT_PROPERTIES, 0); // Use IgnoreAttributes version since a readonly property may be // overridden and SetProperty does not allow this. - Handle<Object> result = JSObject::SetLocalPropertyIgnoreAttributes( - js_object, name, obj_value, attr); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSObject::SetLocalPropertyIgnoreAttributes( + js_object, name, obj_value, attr)); return *result; } - Handle<Object> result = Runtime::ForceSetObjectProperty(isolate, js_object, - name, - obj_value, - attr); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Runtime::ForceSetObjectProperty( + js_object, name, obj_value, attr, + JSReceiver::CERTAINLY_NOT_STORE_FROM_KEYED)); return *result; } // Return property without being observable by accessors or interceptors. 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDataProperty) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_GetDataProperty) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); CONVERT_ARG_HANDLE_CHECKED(Name, key, 1); - LookupResult lookup(isolate); - object->LookupRealNamedProperty(*key, &lookup); - if (!lookup.IsFound()) return isolate->heap()->undefined_value(); - switch (lookup.type()) { - case NORMAL: - return lookup.holder()->GetNormalizedProperty(&lookup); - case FIELD: - return lookup.holder()->FastPropertyAt( - lookup.representation(), - lookup.GetFieldIndex().field_index()); - case CONSTANT: - return lookup.GetConstant(); - case CALLBACKS: - case HANDLER: - case INTERCEPTOR: - case TRANSITION: - return isolate->heap()->undefined_value(); - case NONEXISTENT: - UNREACHABLE(); - } - return isolate->heap()->undefined_value(); + return *JSObject::GetDataProperty(object, key); } -Handle<Object> Runtime::SetObjectProperty(Isolate* isolate, - Handle<Object> object, - Handle<Object> key, - Handle<Object> value, - PropertyAttributes attr, - StrictMode strict_mode) { +MaybeHandle<Object> Runtime::SetObjectProperty(Isolate* isolate, + Handle<Object> object, + Handle<Object> key, + Handle<Object> value, + PropertyAttributes attr, + StrictMode strict_mode) { SetPropertyMode set_mode = attr == NONE ? SET_PROPERTY : DEFINE_PROPERTY; if (object->IsUndefined() || object->IsNull()) { @@ -5293,15 +5320,17 @@ Handle<Object> Runtime::SetObjectProperty(Isolate* isolate, Handle<Object> error = isolate->factory()->NewTypeError("non_object_property_store", HandleVector(args, 2)); - isolate->Throw(*error); - return Handle<Object>(); + return isolate->Throw<Object>(error); } if (object->IsJSProxy()) { - bool has_pending_exception = false; - Handle<Object> name_object = key->IsSymbol() - ? key : Execution::ToString(isolate, key, &has_pending_exception); - if (has_pending_exception) return Handle<Object>(); // exception + Handle<Object> name_object; + if (key->IsSymbol()) { + name_object = key; + } else { + ASSIGN_RETURN_ON_EXCEPTION( + isolate, name_object, Execution::ToString(isolate, key), Object); + } Handle<Name> name = Handle<Name>::cast(name_object); return JSReceiver::SetProperty(Handle<JSProxy>::cast(object), name, value, attr, @@ -5327,22 +5356,19 @@ Handle<Object> Runtime::SetObjectProperty(Isolate* isolate, return value; } - js_object->ValidateElements(); + JSObject::ValidateElements(js_object); if (js_object->HasExternalArrayElements() || js_object->HasFixedTypedArrayElements()) { if (!value->IsNumber() && !value->IsUndefined()) { - bool has_exception; - Handle<Object> number = - Execution::ToNumber(isolate, value, &has_exception); - if (has_exception) return Handle<Object>(); // exception - value = number; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, value, Execution::ToNumber(isolate, value), Object); } } - Handle<Object> result = JSObject::SetElement(js_object, index, value, attr, - strict_mode, - true, - set_mode); - js_object->ValidateElements(); + + MaybeHandle<Object> result = JSObject::SetElement( + js_object, index, value, attr, strict_mode, true, set_mode); + JSObject::ValidateElements(js_object); + return result.is_null() ? 
result : value; } @@ -5351,44 +5377,40 @@ Handle<Object> Runtime::SetObjectProperty(Isolate* isolate, if (name->AsArrayIndex(&index)) { if (js_object->HasExternalArrayElements()) { if (!value->IsNumber() && !value->IsUndefined()) { - bool has_exception; - Handle<Object> number = - Execution::ToNumber(isolate, value, &has_exception); - if (has_exception) return Handle<Object>(); // exception - value = number; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, value, Execution::ToNumber(isolate, value), Object); } } - return JSObject::SetElement(js_object, index, value, attr, strict_mode, - true, - set_mode); + return JSObject::SetElement(js_object, index, value, attr, + strict_mode, true, set_mode); } else { - if (name->IsString()) Handle<String>::cast(name)->TryFlatten(); + if (name->IsString()) name = String::Flatten(Handle<String>::cast(name)); return JSReceiver::SetProperty(js_object, name, value, attr, strict_mode); } } // Call-back into JavaScript to convert the key to a string. - bool has_pending_exception = false; - Handle<Object> converted = - Execution::ToString(isolate, key, &has_pending_exception); - if (has_pending_exception) return Handle<Object>(); // exception + Handle<Object> converted; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, converted, Execution::ToString(isolate, key), Object); Handle<String> name = Handle<String>::cast(converted); if (name->AsArrayIndex(&index)) { - return JSObject::SetElement(js_object, index, value, attr, strict_mode, - true, - set_mode); + return JSObject::SetElement(js_object, index, value, attr, + strict_mode, true, set_mode); } else { return JSReceiver::SetProperty(js_object, name, value, attr, strict_mode); } } -Handle<Object> Runtime::ForceSetObjectProperty(Isolate* isolate, - Handle<JSObject> js_object, - Handle<Object> key, - Handle<Object> value, - PropertyAttributes attr) { +MaybeHandle<Object> Runtime::ForceSetObjectProperty( + Handle<JSObject> js_object, + Handle<Object> key, + Handle<Object> value, + PropertyAttributes attr, + JSReceiver::StoreFromKeyed store_from_keyed) { + Isolate* isolate = js_object->GetIsolate(); // Check if the given key is an array index. uint32_t index; if (key->ToArrayIndex(&index)) { @@ -5403,48 +5425,46 @@ Handle<Object> Runtime::ForceSetObjectProperty(Isolate* isolate, return value; } - return JSObject::SetElement(js_object, index, value, attr, SLOPPY, - false, - DEFINE_PROPERTY); + return JSObject::SetElement(js_object, index, value, attr, + SLOPPY, false, DEFINE_PROPERTY); } if (key->IsName()) { Handle<Name> name = Handle<Name>::cast(key); if (name->AsArrayIndex(&index)) { - return JSObject::SetElement(js_object, index, value, attr, SLOPPY, - false, - DEFINE_PROPERTY); + return JSObject::SetElement(js_object, index, value, attr, + SLOPPY, false, DEFINE_PROPERTY); } else { - if (name->IsString()) Handle<String>::cast(name)->TryFlatten(); - return JSObject::SetLocalPropertyIgnoreAttributes(js_object, name, - value, attr); + if (name->IsString()) name = String::Flatten(Handle<String>::cast(name)); + return JSObject::SetLocalPropertyIgnoreAttributes( + js_object, name, value, attr, Object::OPTIMAL_REPRESENTATION, + ALLOW_AS_CONSTANT, JSReceiver::PERFORM_EXTENSIBILITY_CHECK, + store_from_keyed); } } // Call-back into JavaScript to convert the key to a string. 
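// Another recurring substitution: the in-place, result-ignored TryFlatten()
// and FlattenString() calls become String::Flatten(), which returns a handle
// to the flat string that the caller must keep. Usage pattern as it appears
// in the surrounding hunks (illustrative only):
//
//   subject = String::Flatten(subject);  // reassign; handle now refers to the
//                                        // flat representation
//   if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));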
- bool has_pending_exception = false; - Handle<Object> converted = - Execution::ToString(isolate, key, &has_pending_exception); - if (has_pending_exception) return Handle<Object>(); // exception + Handle<Object> converted; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, converted, Execution::ToString(isolate, key), Object); Handle<String> name = Handle<String>::cast(converted); if (name->AsArrayIndex(&index)) { - return JSObject::SetElement(js_object, index, value, attr, SLOPPY, - false, - DEFINE_PROPERTY); + return JSObject::SetElement(js_object, index, value, attr, + SLOPPY, false, DEFINE_PROPERTY); } else { - return JSObject::SetLocalPropertyIgnoreAttributes(js_object, name, value, - attr); + return JSObject::SetLocalPropertyIgnoreAttributes( + js_object, name, value, attr, Object::OPTIMAL_REPRESENTATION, + ALLOW_AS_CONSTANT, JSReceiver::PERFORM_EXTENSIBILITY_CHECK, + store_from_keyed); } } -MaybeObject* Runtime::DeleteObjectProperty(Isolate* isolate, - Handle<JSReceiver> receiver, - Handle<Object> key, - JSReceiver::DeleteMode mode) { - HandleScope scope(isolate); - +MaybeHandle<Object> Runtime::DeleteObjectProperty(Isolate* isolate, + Handle<JSReceiver> receiver, + Handle<Object> key, + JSReceiver::DeleteMode mode) { // Check if the given key is an array index. uint32_t index; if (key->ToArrayIndex(&index)) { @@ -5455,12 +5475,10 @@ MaybeObject* Runtime::DeleteObjectProperty(Isolate* isolate, // underlying string does nothing with the deletion, we can ignore // such deletions. if (receiver->IsStringObjectWithCharacterAt(index)) { - return isolate->heap()->true_value(); + return isolate->factory()->true_value(); } - Handle<Object> result = JSReceiver::DeleteElement(receiver, index, mode); - RETURN_IF_EMPTY_HANDLE(isolate, result); - return *result; + return JSReceiver::DeleteElement(receiver, index, mode); } Handle<Name> name; @@ -5468,21 +5486,18 @@ MaybeObject* Runtime::DeleteObjectProperty(Isolate* isolate, name = Handle<Name>::cast(key); } else { // Call-back into JavaScript to convert the key to a string. 
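// The handle-based helpers in this region (Runtime::SetObjectProperty,
// ForceSetObjectProperty, DeleteObjectProperty) now signal errors by returning
// an empty MaybeHandle while the exception is pending on the isolate.
// isolate->Throw<Object>(error) bundles the two steps that used to be written
// out by hand; roughly (illustrative sketch, not the literal implementation):
//
//   // old: isolate->Throw(*error); return Handle<Object>();
//   // new: sets the pending exception and yields an empty MaybeHandle<Object>
//   return isolate->Throw<Object>(error);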
- bool has_pending_exception = false; - Handle<Object> converted = Execution::ToString( - isolate, key, &has_pending_exception); - if (has_pending_exception) return Failure::Exception(); + Handle<Object> converted; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, converted, Execution::ToString(isolate, key), Object); name = Handle<String>::cast(converted); } - if (name->IsString()) Handle<String>::cast(name)->TryFlatten(); - Handle<Object> result = JSReceiver::DeleteProperty(receiver, name, mode); - RETURN_IF_EMPTY_HANDLE(isolate, result); - return *result; + if (name->IsString()) name = String::Flatten(Handle<String>::cast(name)); + return JSReceiver::DeleteProperty(receiver, name, mode); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetHiddenProperty) { +RUNTIME_FUNCTION(Runtime_SetHiddenProperty) { HandleScope scope(isolate); RUNTIME_ASSERT(args.length() == 3); @@ -5493,7 +5508,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetHiddenProperty) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetProperty) { +RUNTIME_FUNCTION(Runtime_SetProperty) { HandleScope scope(isolate); RUNTIME_ASSERT(args.length() == 4 || args.length() == 5); @@ -5513,16 +5528,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetProperty) { strict_mode = strict_mode_arg; } - Handle<Object> result = Runtime::SetObjectProperty(isolate, object, key, - value, - attributes, - strict_mode); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Runtime::SetObjectProperty( + isolate, object, key, value, attributes, strict_mode)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_TransitionElementsKind) { +RUNTIME_FUNCTION(Runtime_TransitionElementsKind) { HandleScope scope(isolate); RUNTIME_ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSArray, array, 0); @@ -5535,7 +5550,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TransitionElementsKind) { // Set the native flag on the function. // This is used to decide if we should transform null and undefined // into the global object when doing call and apply. -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetNativeFlag) { +RUNTIME_FUNCTION(Runtime_SetNativeFlag) { SealHandleScope shs(isolate); RUNTIME_ASSERT(args.length() == 1); @@ -5549,11 +5564,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetNativeFlag) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetInlineBuiltinFlag) { +RUNTIME_FUNCTION(Runtime_SetInlineBuiltinFlag) { SealHandleScope shs(isolate); RUNTIME_ASSERT(args.length() == 1); - - Handle<Object> object = args.at<Object>(0); + CONVERT_ARG_HANDLE_CHECKED(Object, object, 0); if (object->IsJSFunction()) { JSFunction* func = JSFunction::cast(*object); @@ -5563,12 +5577,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetInlineBuiltinFlag) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreArrayLiteralElement) { +RUNTIME_FUNCTION(Runtime_StoreArrayLiteralElement) { HandleScope scope(isolate); RUNTIME_ASSERT(args.length() == 5); CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); CONVERT_SMI_ARG_CHECKED(store_index, 1); - Handle<Object> value = args.at<Object>(2); + CONVERT_ARG_HANDLE_CHECKED(Object, value, 2); CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 3); CONVERT_SMI_ARG_CHECKED(literal_index, 4); @@ -5622,29 +5636,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreArrayLiteralElement) { // Check whether debugger and is about to step into the callback that is passed // to a built-in function such as Array.forEach. 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugCallbackSupportsStepping) { - SealHandleScope shs(isolate); -#ifdef ENABLE_DEBUGGER_SUPPORT +RUNTIME_FUNCTION(Runtime_DebugCallbackSupportsStepping) { + ASSERT(args.length() == 1); if (!isolate->IsDebuggerActive() || !isolate->debug()->StepInActive()) { return isolate->heap()->false_value(); } CONVERT_ARG_CHECKED(Object, callback, 0); // We do not step into the callback if it's a builtin or not even a function. - if (!callback->IsJSFunction() || JSFunction::cast(callback)->IsBuiltin()) { - return isolate->heap()->false_value(); - } - return isolate->heap()->true_value(); -#else - return isolate->heap()->false_value(); -#endif // ENABLE_DEBUGGER_SUPPORT + return isolate->heap()->ToBoolean( + callback->IsJSFunction() && !JSFunction::cast(callback)->IsBuiltin()); } // Set one shot breakpoints for the callback function that is passed to a // built-in function such as Array.forEach to enable stepping into the callback. -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPrepareStepInIfStepping) { - SealHandleScope shs(isolate); -#ifdef ENABLE_DEBUGGER_SUPPORT +RUNTIME_FUNCTION(Runtime_DebugPrepareStepInIfStepping) { + ASSERT(args.length() == 1); Debug* debug = isolate->debug(); if (!debug->IsStepping()) return isolate->heap()->undefined_value(); CONVERT_ARG_HANDLE_CHECKED(JSFunction, callback, 0); @@ -5654,14 +5661,33 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPrepareStepInIfStepping) { // again, we need to clear the step out at this point. debug->ClearStepOut(); debug->FloodWithOneShot(callback); -#endif // ENABLE_DEBUGGER_SUPPORT + return isolate->heap()->undefined_value(); +} + + +// The argument is a closure that is kept until the epilogue is called. +// On exception, the closure is called, which returns the promise if the +// exception is considered uncaught, or undefined otherwise. +RUNTIME_FUNCTION(Runtime_DebugPromiseHandlePrologue) { + ASSERT(args.length() == 1); + HandleScope scope(isolate); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, promise_getter, 0); + isolate->debug()->PromiseHandlePrologue(promise_getter); + return isolate->heap()->undefined_value(); +} + + +RUNTIME_FUNCTION(Runtime_DebugPromiseHandleEpilogue) { + ASSERT(args.length() == 0); + SealHandleScope shs(isolate); + isolate->debug()->PromiseHandleEpilogue(); return isolate->heap()->undefined_value(); } // Set a local property, even if it is READ_ONLY. If the property does not // exist, it will be added with attributes NONE. 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_IgnoreAttributesAndSetProperty) { +RUNTIME_FUNCTION(Runtime_IgnoreAttributesAndSetProperty) { HandleScope scope(isolate); RUNTIME_ASSERT(args.length() == 3 || args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); @@ -5676,14 +5702,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IgnoreAttributesAndSetProperty) { (unchecked_value & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0); attributes = static_cast<PropertyAttributes>(unchecked_value); } - Handle<Object> result = JSObject::SetLocalPropertyIgnoreAttributes( - object, name, value, attributes); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSObject::SetLocalPropertyIgnoreAttributes( + object, name, value, attributes)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DeleteProperty) { +RUNTIME_FUNCTION(Runtime_DeleteProperty) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(JSReceiver, object, 0); @@ -5691,15 +5719,17 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeleteProperty) { CONVERT_STRICT_MODE_ARG_CHECKED(strict_mode, 2); JSReceiver::DeleteMode delete_mode = strict_mode == STRICT ? JSReceiver::STRICT_DELETION : JSReceiver::NORMAL_DELETION; - Handle<Object> result = JSReceiver::DeleteProperty(object, key, delete_mode); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSReceiver::DeleteProperty(object, key, delete_mode)); return *result; } -static MaybeObject* HasLocalPropertyImplementation(Isolate* isolate, - Handle<JSObject> object, - Handle<Name> key) { +static Object* HasLocalPropertyImplementation(Isolate* isolate, + Handle<JSObject> object, + Handle<Name> key) { if (JSReceiver::HasLocalProperty(object, key)) { return isolate->heap()->true_value(); } @@ -5713,16 +5743,16 @@ static MaybeObject* HasLocalPropertyImplementation(Isolate* isolate, Handle<JSObject>::cast(proto), key); } - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); return isolate->heap()->false_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_HasLocalProperty) { +RUNTIME_FUNCTION(Runtime_HasLocalProperty) { HandleScope scope(isolate); ASSERT(args.length() == 2); + CONVERT_ARG_HANDLE_CHECKED(Object, object, 0) CONVERT_ARG_HANDLE_CHECKED(Name, key, 1); - Handle<Object> object = args.at<Object>(0); uint32_t index; const bool key_is_array_index = key->AsArrayIndex(&index); @@ -5737,7 +5767,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_HasLocalProperty) { ASSERT(!isolate->has_scheduled_exception()); return isolate->heap()->true_value(); } else { - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); } Map* map = js_obj->map(); if (!key_is_array_index && @@ -5760,33 +5790,32 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_HasLocalProperty) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_HasProperty) { +RUNTIME_FUNCTION(Runtime_HasProperty) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSReceiver, receiver, 0); CONVERT_ARG_HANDLE_CHECKED(Name, key, 1); bool result = JSReceiver::HasProperty(receiver, key); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); - if (isolate->has_pending_exception()) return Failure::Exception(); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); + if (isolate->has_pending_exception()) return isolate->heap()->exception(); return isolate->heap()->ToBoolean(result); } -RUNTIME_FUNCTION(MaybeObject*, 
Runtime_HasElement) { +RUNTIME_FUNCTION(Runtime_HasElement) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSReceiver, receiver, 0); CONVERT_SMI_ARG_CHECKED(index, 1); bool result = JSReceiver::HasElement(receiver, index); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); - if (isolate->has_pending_exception()) return Failure::Exception(); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); return isolate->heap()->ToBoolean(result); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsPropertyEnumerable) { +RUNTIME_FUNCTION(Runtime_IsPropertyEnumerable) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -5795,7 +5824,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsPropertyEnumerable) { PropertyAttributes att = JSReceiver::GetLocalPropertyAttribute(object, key); if (att == ABSENT || (att & DONT_ENUM) != 0) { - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); return isolate->heap()->false_value(); } ASSERT(!isolate->has_scheduled_exception()); @@ -5803,14 +5832,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsPropertyEnumerable) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNames) { +RUNTIME_FUNCTION(Runtime_GetPropertyNames) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSReceiver, object, 0); - bool threw = false; - Handle<JSArray> result = GetKeysFor(object, &threw); - if (threw) return Failure::Exception(); - return *result; + Handle<JSArray> result; + + isolate->counters()->for_in()->Increment(); + Handle<FixedArray> elements; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, elements, + JSReceiver::GetKeys(object, JSReceiver::INCLUDE_PROTOS)); + return *isolate->factory()->NewJSArrayWithElements(elements); } @@ -5819,7 +5852,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNames) { // all enumerable properties of the object and its prototypes // have none, the map of the object. This is used to speed up // the check for deletions during a for-in. -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNamesFast) { +RUNTIME_FUNCTION(Runtime_GetPropertyNamesFast) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); @@ -5829,10 +5862,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNamesFast) { HandleScope scope(isolate); Handle<JSReceiver> object(raw_object); - bool threw = false; - Handle<FixedArray> content = - GetKeysInFixedArrayFor(object, INCLUDE_PROTOS, &threw); - if (threw) return Failure::Exception(); + Handle<FixedArray> content; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, content, + JSReceiver::GetKeys(object, JSReceiver::INCLUDE_PROTOS)); // Test again, since cache may have been built by preceding call. if (object->IsSimpleEnum()) return object->map(); @@ -5859,7 +5892,7 @@ static int LocalPrototypeChainLength(JSObject* obj) { // Return the names of the local named properties. // args[0]: object // args[1]: PropertyAttributes as int -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalPropertyNames) { +RUNTIME_FUNCTION(Runtime_GetLocalPropertyNames) { HandleScope scope(isolate); ASSERT(args.length() == 2); if (!args[0]->IsJSObject()) { @@ -5874,11 +5907,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalPropertyNames) { if (obj->IsJSGlobalProxy()) { // Only collect names if access is permitted. 
if (obj->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(obj, - isolate->factory()->undefined_value(), - v8::ACCESS_KEYS)) { - isolate->ReportFailedAccessCheckWrapper(obj, v8::ACCESS_KEYS); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + !isolate->MayNamedAccess( + obj, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) { + isolate->ReportFailedAccessCheck(obj, v8::ACCESS_KEYS); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); return *isolate->factory()->NewJSArray(0); } obj = Handle<JSObject>(JSObject::cast(obj->GetPrototype())); @@ -5894,11 +5926,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalPropertyNames) { for (int i = 0; i < length; i++) { // Only collect names if access is permitted. if (jsproto->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(jsproto, - isolate->factory()->undefined_value(), - v8::ACCESS_KEYS)) { - isolate->ReportFailedAccessCheckWrapper(jsproto, v8::ACCESS_KEYS); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + !isolate->MayNamedAccess( + jsproto, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) { + isolate->ReportFailedAccessCheck(jsproto, v8::ACCESS_KEYS); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); return *isolate->factory()->NewJSArray(0); } int n; @@ -5975,7 +6006,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalPropertyNames) { // Return the names of the local indexed properties. // args[0]: object -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalElementNames) { +RUNTIME_FUNCTION(Runtime_GetLocalElementNames) { HandleScope scope(isolate); ASSERT(args.length() == 1); if (!args[0]->IsJSObject()) { @@ -5992,7 +6023,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalElementNames) { // Return information on whether an object has a named or indexed interceptor. // args[0]: object -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetInterceptorInfo) { +RUNTIME_FUNCTION(Runtime_GetInterceptorInfo) { HandleScope scope(isolate); ASSERT(args.length() == 1); if (!args[0]->IsJSObject()) { @@ -6010,14 +6041,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetInterceptorInfo) { // Return property names from named interceptor. // args[0]: object -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetNamedInterceptorPropertyNames) { +RUNTIME_FUNCTION(Runtime_GetNamedInterceptorPropertyNames) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0); if (obj->HasNamedInterceptor()) { - v8::Handle<v8::Array> result = GetKeysForNamedInterceptor(obj, obj); - if (!result.IsEmpty()) return *v8::Utils::OpenHandle(*result); + Handle<JSObject> result; + if (JSObject::GetKeysForNamedInterceptor(obj, obj).ToHandle(&result)) { + return *result; + } } return isolate->heap()->undefined_value(); } @@ -6025,33 +6058,34 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetNamedInterceptorPropertyNames) { // Return element names from indexed interceptor. 
// args[0]: object -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetIndexedInterceptorElementNames) { +RUNTIME_FUNCTION(Runtime_GetIndexedInterceptorElementNames) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0); if (obj->HasIndexedInterceptor()) { - v8::Handle<v8::Array> result = GetKeysForIndexedInterceptor(obj, obj); - if (!result.IsEmpty()) return *v8::Utils::OpenHandle(*result); + Handle<JSObject> result; + if (JSObject::GetKeysForIndexedInterceptor(obj, obj).ToHandle(&result)) { + return *result; + } } return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_LocalKeys) { +RUNTIME_FUNCTION(Runtime_LocalKeys) { HandleScope scope(isolate); - ASSERT_EQ(args.length(), 1); + ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSObject, raw_object, 0); Handle<JSObject> object(raw_object); if (object->IsJSGlobalProxy()) { // Do access checks before going to the global object. if (object->IsAccessCheckNeeded() && - !isolate->MayNamedAccessWrapper(object, - isolate->factory()->undefined_value(), - v8::ACCESS_KEYS)) { - isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_KEYS); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + !isolate->MayNamedAccess( + object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) { + isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); return *isolate->factory()->NewJSArray(0); } @@ -6061,10 +6095,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LocalKeys) { object = Handle<JSObject>::cast(proto); } - bool threw = false; - Handle<FixedArray> contents = - GetKeysInFixedArrayFor(object, LOCAL_ONLY, &threw); - if (threw) return Failure::Exception(); + Handle<FixedArray> contents; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, contents, + JSReceiver::GetKeys(object, JSReceiver::LOCAL_ONLY)); // Some fast paths through GetKeysInFixedArrayFor reuse a cached // property array and since the result is mutable we have to create @@ -6088,9 +6122,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LocalKeys) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArgumentsProperty) { +RUNTIME_FUNCTION(Runtime_GetArgumentsProperty) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(Object, raw_key, 0); // Compute the frame holding the arguments. JavaScriptFrameIterator it(isolate); @@ -6103,22 +6138,25 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArgumentsProperty) { // Try to convert the key to an index. If successful and within // index return the the argument from the frame. uint32_t index; - if (args[0]->ToArrayIndex(&index) && index < n) { + if (raw_key->ToArrayIndex(&index) && index < n) { return frame->GetParameter(index); } - if (args[0]->IsSymbol()) { + HandleScope scope(isolate); + if (raw_key->IsSymbol()) { // Lookup in the initial Object.prototype object. - return isolate->initial_object_prototype()->GetProperty( - Symbol::cast(args[0])); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Object::GetProperty(isolate->initial_object_prototype(), + Handle<Symbol>::cast(raw_key))); + return *result; } // Convert the key to a string. 
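// At the runtime entry points themselves the *_FAILURE_* macro variants are
// used (as in the key conversion just below): a RUNTIME_FUNCTION still returns
// a raw Object*, so instead of an empty MaybeHandle these macros return the
// exception sentinel that replaces the old Failure::Exception(). Roughly, as
// a sketch of the intent rather than the literal expansion:
//
//   Handle<Object> converted;
//   if (!Execution::ToString(isolate, raw_key).ToHandle(&converted)) {
//     return isolate->heap()->exception();  // pending exception already set
//   }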
- HandleScope scope(isolate); - bool exception = false; - Handle<Object> converted = - Execution::ToString(isolate, args.at<Object>(0), &exception); - if (exception) return Failure::Exception(); + Handle<Object> converted; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, converted, Execution::ToString(isolate, raw_key)); Handle<String> key = Handle<String>::cast(converted); // Try to convert the string key into an array index. @@ -6127,16 +6165,19 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArgumentsProperty) { return frame->GetParameter(index); } else { Handle<Object> initial_prototype(isolate->initial_object_prototype()); - Handle<Object> result = - Object::GetElement(isolate, initial_prototype, index); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Object::GetElement(isolate, initial_prototype, index)); return *result; } } // Handle special arguments properties. - if (key->Equals(isolate->heap()->length_string())) return Smi::FromInt(n); - if (key->Equals(isolate->heap()->callee_string())) { + if (String::Equals(isolate->factory()->length_string(), key)) { + return Smi::FromInt(n); + } + if (String::Equals(isolate->factory()->callee_string(), key)) { JSFunction* function = frame->function(); if (function->shared()->strict_mode() == STRICT) { return isolate->Throw(*isolate->factory()->NewTypeError( @@ -6146,11 +6187,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArgumentsProperty) { } // Lookup in the initial Object.prototype object. - return isolate->initial_object_prototype()->GetProperty(*key); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Object::GetProperty(isolate->initial_object_prototype(), key)); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ToFastProperties) { +RUNTIME_FUNCTION(Runtime_ToFastProperties) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(Object, object, 0); @@ -6161,20 +6206,21 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ToFastProperties) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ToBool) { +RUNTIME_FUNCTION(Runtime_ToBool) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); + CONVERT_ARG_CHECKED(Object, object, 0); - return isolate->heap()->ToBoolean(args[0]->BooleanValue()); + return isolate->heap()->ToBoolean(object->BooleanValue()); } // Returns the type string of a value; see ECMA-262, 11.4.3 (p 47). // Possible optimizations: put the type string into the oddballs. -RUNTIME_FUNCTION(MaybeObject*, Runtime_Typeof) { +RUNTIME_FUNCTION(Runtime_Typeof) { SealHandleScope shs(isolate); - - Object* obj = args[0]; + ASSERT(args.length() == 1); + CONVERT_ARG_CHECKED(Object, obj, 0); if (obj->IsNumber()) return isolate->heap()->number_string(); HeapObject* heap_obj = HeapObject::cast(obj); @@ -6235,18 +6281,19 @@ static int ParseDecimalInteger(const uint8_t*s, int from, int to) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToNumber) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_StringToNumber) { + HandleScope handle_scope(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(String, subject, 0); - subject->TryFlatten(); + CONVERT_ARG_HANDLE_CHECKED(String, subject, 0); + subject = String::Flatten(subject); // Fast case: short integer or some sorts of junk values. 
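// In the flattened-string fast path below, the raw character pointer is now
// taken only inside a DisallowHeapAllocation scope: GetChars() hands out an
// interior pointer into the heap object, which a moving GC would invalidate,
// so allocation is forbidden for as long as the pointer is live. Guarded
// pattern as used below (illustrative):
//
//   DisallowHeapAllocation no_gc;
//   const uint8_t* data = Handle<SeqOneByteString>::cast(subject)->GetChars();
//   // ... scan 'data' without allocating ...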
- int len = subject->length(); if (subject->IsSeqOneByteString()) { + int len = subject->length(); if (len == 0) return Smi::FromInt(0); - uint8_t const* data = SeqOneByteString::cast(subject)->GetChars(); + DisallowHeapAllocation no_gc; + uint8_t const* data = Handle<SeqOneByteString>::cast(subject)->GetChars(); bool minus = (data[0] == '-'); int start_pos = (minus ? 1 : 0); @@ -6254,15 +6301,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToNumber) { return isolate->heap()->nan_value(); } else if (data[start_pos] > '9') { // Fast check for a junk value. A valid string may start from a - // whitespace, a sign ('+' or '-'), the decimal point, a decimal digit or - // the 'I' character ('Infinity'). All of that have codes not greater than - // '9' except 'I' and . + // whitespace, a sign ('+' or '-'), the decimal point, a decimal digit + // or the 'I' character ('Infinity'). All of that have codes not greater + // than '9' except 'I' and . if (data[start_pos] != 'I' && data[start_pos] != 0xa0) { return isolate->heap()->nan_value(); } } else if (len - start_pos < 10 && AreDigits(data, start_pos, len)) { - // The maximal/minimal smi has 10 digits. If the string has less digits we - // know it will fit into the smi-data type. + // The maximal/minimal smi has 10 digits. If the string has less digits + // we know it will fit into the smi-data type. int d = ParseDecimalInteger(data, start_pos, len); if (minus) { if (d == 0) return isolate->heap()->minus_zero_value(); @@ -6291,98 +6338,132 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToNumber) { // Type", https://bugs.ecmascript.org/show_bug.cgi?id=1584 flags |= ALLOW_OCTAL | ALLOW_BINARY; } - return isolate->heap()->NumberFromDouble( - StringToDouble(isolate->unicode_cache(), subject, flags)); + + return *isolate->factory()->NewNumber(StringToDouble( + isolate->unicode_cache(), *subject, flags)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NewString) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NewString) { + HandleScope scope(isolate); + ASSERT(args.length() == 2); CONVERT_SMI_ARG_CHECKED(length, 0); CONVERT_BOOLEAN_ARG_CHECKED(is_one_byte, 1); if (length == 0) return isolate->heap()->empty_string(); + Handle<String> result; if (is_one_byte) { - return isolate->heap()->AllocateRawOneByteString(length); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, isolate->factory()->NewRawOneByteString(length)); } else { - return isolate->heap()->AllocateRawTwoByteString(length); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, isolate->factory()->NewRawTwoByteString(length)); } + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_TruncateString) { +RUNTIME_FUNCTION(Runtime_TruncateString) { HandleScope scope(isolate); + ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(SeqString, string, 0); CONVERT_SMI_ARG_CHECKED(new_length, 1); + RUNTIME_ASSERT(new_length >= 0); return *SeqString::Truncate(string, new_length); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_URIEscape) { +RUNTIME_FUNCTION(Runtime_URIEscape) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(String, source, 0); - Handle<String> string = FlattenGetString(source); + Handle<String> string = String::Flatten(source); ASSERT(string->IsFlat()); - Handle<String> result = string->IsOneByteRepresentationUnderneath() - ? 
URIEscape::Escape<uint8_t>(isolate, source) - : URIEscape::Escape<uc16>(isolate, source); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<String> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + string->IsOneByteRepresentationUnderneath() + ? URIEscape::Escape<uint8_t>(isolate, source) + : URIEscape::Escape<uc16>(isolate, source)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_URIUnescape) { +RUNTIME_FUNCTION(Runtime_URIUnescape) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(String, source, 0); - Handle<String> string = FlattenGetString(source); + Handle<String> string = String::Flatten(source); ASSERT(string->IsFlat()); - return string->IsOneByteRepresentationUnderneath() - ? *URIUnescape::Unescape<uint8_t>(isolate, source) - : *URIUnescape::Unescape<uc16>(isolate, source); + Handle<String> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + string->IsOneByteRepresentationUnderneath() + ? URIUnescape::Unescape<uint8_t>(isolate, source) + : URIUnescape::Unescape<uc16>(isolate, source)); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_QuoteJSONString) { +RUNTIME_FUNCTION(Runtime_QuoteJSONString) { HandleScope scope(isolate); CONVERT_ARG_HANDLE_CHECKED(String, string, 0); ASSERT(args.length() == 1); - return BasicJsonStringifier::StringifyString(isolate, string); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, BasicJsonStringifier::StringifyString(isolate, string)); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_BasicJSONStringify) { +RUNTIME_FUNCTION(Runtime_BasicJSONStringify) { HandleScope scope(isolate); ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(Object, object, 0); BasicJsonStringifier stringifier(isolate); - return stringifier.Stringify(Handle<Object>(args[0], isolate)); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, stringifier.Stringify(object)); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringParseInt) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_StringParseInt) { + HandleScope handle_scope(isolate); + ASSERT(args.length() == 2); + CONVERT_ARG_HANDLE_CHECKED(String, subject, 0); + CONVERT_NUMBER_CHECKED(int, radix, Int32, args[1]); + RUNTIME_ASSERT(radix == 0 || (2 <= radix && radix <= 36)); - CONVERT_ARG_CHECKED(String, s, 0); - CONVERT_SMI_ARG_CHECKED(radix, 1); + subject = String::Flatten(subject); + double value; - s->TryFlatten(); + { DisallowHeapAllocation no_gc; + String::FlatContent flat = subject->GetFlatContent(); - RUNTIME_ASSERT(radix == 0 || (2 <= radix && radix <= 36)); - double value = StringToInt(isolate->unicode_cache(), s, radix); - return isolate->heap()->NumberFromDouble(value); + // ECMA-262 section 15.1.2.3, empty string is NaN + if (flat.IsAscii()) { + value = StringToInt( + isolate->unicode_cache(), flat.ToOneByteVector(), radix); + } else { + value = StringToInt( + isolate->unicode_cache(), flat.ToUC16Vector(), radix); + } + } + + return *isolate->factory()->NewNumber(value); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringParseFloat) { - SealHandleScope shs(isolate); - CONVERT_ARG_CHECKED(String, str, 0); +RUNTIME_FUNCTION(Runtime_StringParseFloat) { + HandleScope shs(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(String, subject, 0); - // ECMA-262 section 15.1.2.3, empty string is NaN - double value = StringToDouble(isolate->unicode_cache(), - str, ALLOW_TRAILING_JUNK, OS::nan_value()); + subject = 
String::Flatten(subject); + double value = StringToDouble( + isolate->unicode_cache(), *subject, ALLOW_TRAILING_JUNK, OS::nan_value()); - // Create a number object from the value. - return isolate->heap()->NumberFromDouble(value); + return *isolate->factory()->NewNumber(value); } @@ -6396,7 +6477,7 @@ static inline bool ToUpperOverflows(uc32 character) { template <class Converter> -MUST_USE_RESULT static MaybeObject* ConvertCaseHelper( +MUST_USE_RESULT static Object* ConvertCaseHelper( Isolate* isolate, String* string, SeqString* result, @@ -6619,13 +6700,11 @@ static bool FastAsciiConvert(char* dst, template <class Converter> -MUST_USE_RESULT static MaybeObject* ConvertCase( - Arguments args, +MUST_USE_RESULT static Object* ConvertCase( + Handle<String> s, Isolate* isolate, unibrow::Mapping<Converter, 128>* mapping) { - HandleScope handle_scope(isolate); - CONVERT_ARG_HANDLE_CHECKED(String, s, 0); - s = FlattenGetString(s); + s = String::Flatten(s); int length = s->length(); // Assume that the string is not empty; we need this assumption later if (length == 0) return *s; @@ -6637,9 +6716,9 @@ MUST_USE_RESULT static MaybeObject* ConvertCase( // might break in the future if we implement more context and locale // dependent upper/lower conversions. if (s->IsOneByteRepresentationUnderneath()) { + // Same length as input. Handle<SeqOneByteString> result = - isolate->factory()->NewRawOneByteString(length); - ASSERT(!result.is_null()); // Same length as input. + isolate->factory()->NewRawOneByteString(length).ToHandleChecked(); DisallowHeapAllocation no_gc; String::FlatContent flat_content = s->GetFlatContent(); ASSERT(flat_content.IsFlat()); @@ -6650,48 +6729,52 @@ MUST_USE_RESULT static MaybeObject* ConvertCase( length, &has_changed_character); // If not ASCII, we discard the result and take the 2 byte path. - if (is_ascii) return has_changed_character ? *result : *s; + if (is_ascii) return has_changed_character ? *result : *s; } - Handle<SeqString> result; + Handle<SeqString> result; // Same length as input. if (s->IsOneByteRepresentation()) { - result = isolate->factory()->NewRawOneByteString(length); + result = isolate->factory()->NewRawOneByteString(length).ToHandleChecked(); } else { - result = isolate->factory()->NewRawTwoByteString(length); + result = isolate->factory()->NewRawTwoByteString(length).ToHandleChecked(); } - ASSERT(!result.is_null()); // Same length as input. 
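// In the ConvertCase hunk above, the factory's raw-string allocators now
// return MaybeHandles, and two failure policies are visible:
// .ToHandleChecked() treats an empty handle as fatal, standing in for the old
// "ASSERT(!result.is_null())" on the same-length allocation, while the retry
// with a possibly different length goes through
// ASSIGN_RETURN_FAILURE_ON_EXCEPTION so the failure propagates as a pending
// exception. Sketch, quoting the two forms used above (illustrative only):
//
//   Handle<SeqString> result =
//       isolate->factory()->NewRawTwoByteString(length).ToHandleChecked();
//   ...
//   ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
//       isolate, result, isolate->factory()->NewRawTwoByteString(length));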
- MaybeObject* maybe = ConvertCaseHelper(isolate, *s, *result, length, mapping); - Object* answer; - if (!maybe->ToObject(&answer)) return maybe; - if (answer->IsString()) return answer; + Object* answer = ConvertCaseHelper(isolate, *s, *result, length, mapping); + if (answer->IsException() || answer->IsString()) return answer; ASSERT(answer->IsSmi()); length = Smi::cast(answer)->value(); if (s->IsOneByteRepresentation() && length > 0) { - result = isolate->factory()->NewRawOneByteString(length); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, isolate->factory()->NewRawOneByteString(length)); } else { if (length < 0) length = -length; - result = isolate->factory()->NewRawTwoByteString(length); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, isolate->factory()->NewRawTwoByteString(length)); } - RETURN_IF_EMPTY_HANDLE(isolate, result); return ConvertCaseHelper(isolate, *s, *result, length, mapping); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToLowerCase) { +RUNTIME_FUNCTION(Runtime_StringToLowerCase) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(String, s, 0); return ConvertCase( - args, isolate, isolate->runtime_state()->to_lower_mapping()); + s, isolate, isolate->runtime_state()->to_lower_mapping()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToUpperCase) { +RUNTIME_FUNCTION(Runtime_StringToUpperCase) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(String, s, 0); return ConvertCase( - args, isolate, isolate->runtime_state()->to_upper_mapping()); + s, isolate, isolate->runtime_state()->to_upper_mapping()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringTrim) { +RUNTIME_FUNCTION(Runtime_StringTrim) { HandleScope scope(isolate); ASSERT(args.length() == 3); @@ -6699,7 +6782,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringTrim) { CONVERT_BOOLEAN_ARG_CHECKED(trimLeft, 1); CONVERT_BOOLEAN_ARG_CHECKED(trimRight, 2); - string = FlattenGetString(string); + string = String::Flatten(string); int length = string->length(); int left = 0; @@ -6724,12 +6807,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringTrim) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) { +RUNTIME_FUNCTION(Runtime_StringSplit) { HandleScope handle_scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(String, subject, 0); CONVERT_ARG_HANDLE_CHECKED(String, pattern, 1); CONVERT_NUMBER_CHECKED(uint32_t, limit, Uint32, args[2]); + RUNTIME_ASSERT(limit > 0); int subject_length = subject->length(); int pattern_length = pattern->length(); @@ -6755,7 +6839,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) { // isn't empty, we can never create more parts than ~half the length // of the subject. 
- if (!subject->IsFlat()) FlattenString(subject); + subject = String::Flatten(subject); + pattern = String::Flatten(pattern); static const int kMaxInitialListCapacity = 16; @@ -6764,7 +6849,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) { // Find (up to limit) indices of separator and end-of-string in subject int initial_capacity = Min<uint32_t>(kMaxInitialListCapacity, limit); ZoneList<int> indices(initial_capacity, zone_scope.zone()); - if (!pattern->IsFlat()) FlattenString(pattern); FindStringIndicesDispatch(isolate, *subject, *pattern, &indices, limit, zone_scope.zone()); @@ -6802,10 +6886,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) { if (limit == 0xffffffffu) { if (result->HasFastObjectElements()) { - RegExpResultsCache::Enter(isolate->heap(), - *subject, - *pattern, - *elements, + RegExpResultsCache::Enter(isolate, + subject, + pattern, + elements, RegExpResultsCache::STRING_SPLIT_SUBSTRINGS); } } @@ -6849,25 +6933,21 @@ static int CopyCachedAsciiCharsToArray(Heap* heap, // Converts a String to JSArray. // For example, "foo" => ["f", "o", "o"]. -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToArray) { +RUNTIME_FUNCTION(Runtime_StringToArray) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(String, s, 0); CONVERT_NUMBER_CHECKED(uint32_t, limit, Uint32, args[1]); - s = FlattenGetString(s); + s = String::Flatten(s); const int length = static_cast<int>(Min<uint32_t>(s->length(), limit)); Handle<FixedArray> elements; int position = 0; if (s->IsFlat() && s->IsOneByteRepresentation()) { // Try using cached chars where possible. - Object* obj; - { MaybeObject* maybe_obj = - isolate->heap()->AllocateUninitializedFixedArray(length); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - elements = Handle<FixedArray>(FixedArray::cast(obj), isolate); + elements = isolate->factory()->NewUninitializedFixedArray(length); + DisallowHeapAllocation no_gc; String::FlatContent content = s->GetFlatContent(); if (content.IsAscii()) { @@ -6888,7 +6968,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToArray) { } for (int i = position; i < length; ++i) { Handle<Object> str = - LookupSingleCharacterStringFromCode(isolate, s->Get(i)); + isolate->factory()->LookupSingleCharacterStringFromCode(s->Get(i)); elements->set(i, *str); } @@ -6902,11 +6982,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToArray) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NewStringWrapper) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NewStringWrapper) { + HandleScope scope(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(String, value, 0); - return value->ToObject(isolate); + CONVERT_ARG_HANDLE_CHECKED(String, value, 0); + return *Object::ToObject(isolate, value).ToHandleChecked(); } @@ -6917,91 +6997,70 @@ bool Runtime::IsUpperCaseChar(RuntimeState* runtime_state, uint16_t ch) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NumberToString) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_NumberToString) { + HandleScope scope(isolate); ASSERT(args.length() == 1); + CONVERT_NUMBER_ARG_HANDLE_CHECKED(number, 0); - Object* number = args[0]; - RUNTIME_ASSERT(number->IsNumber()); - - return isolate->heap()->NumberToString(number); + return *isolate->factory()->NumberToString(number); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NumberToStringSkipCache) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_NumberToStringSkipCache) { + HandleScope scope(isolate); ASSERT(args.length() == 1); + 
CONVERT_NUMBER_ARG_HANDLE_CHECKED(number, 0); - Object* number = args[0]; - RUNTIME_ASSERT(number->IsNumber()); - - return isolate->heap()->NumberToString(number, false); + return *isolate->factory()->NumberToString(number, false); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToInteger) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberToInteger) { + HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_DOUBLE_ARG_CHECKED(number, 0); - - // We do not include 0 so that we don't have to treat +0 / -0 cases. - if (number > 0 && number <= Smi::kMaxValue) { - return Smi::FromInt(static_cast<int>(number)); - } - return isolate->heap()->NumberFromDouble(DoubleToInteger(number)); + return *isolate->factory()->NewNumber(DoubleToInteger(number)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToIntegerMapMinusZero) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberToIntegerMapMinusZero) { + HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_DOUBLE_ARG_CHECKED(number, 0); - - // We do not include 0 so that we don't have to treat +0 / -0 cases. - if (number > 0 && number <= Smi::kMaxValue) { - return Smi::FromInt(static_cast<int>(number)); - } - double double_value = DoubleToInteger(number); // Map both -0 and +0 to +0. if (double_value == 0) double_value = 0; - return isolate->heap()->NumberFromDouble(double_value); + return *isolate->factory()->NewNumber(double_value); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToJSUint32) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberToJSUint32) { + HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_NUMBER_CHECKED(int32_t, number, Uint32, args[0]); - return isolate->heap()->NumberFromUint32(number); + return *isolate->factory()->NewNumberFromUint(number); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToJSInt32) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberToJSInt32) { + HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_DOUBLE_ARG_CHECKED(number, 0); - - // We do not include 0 so that we don't have to treat +0 / -0 cases. - if (number > 0 && number <= Smi::kMaxValue) { - return Smi::FromInt(static_cast<int>(number)); - } - return isolate->heap()->NumberFromInt32(DoubleToInt32(number)); + return *isolate->factory()->NewNumberFromInt(DoubleToInt32(number)); } // Converts a Number to a Smi, if possible. Returns NaN if the number is not // a small integer. 
-RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NumberToSmi) { +RUNTIME_FUNCTION(RuntimeHidden_NumberToSmi) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); - - Object* obj = args[0]; + CONVERT_ARG_CHECKED(Object, obj, 0); if (obj->IsSmi()) { return obj; } @@ -7016,101 +7075,91 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NumberToSmi) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_AllocateHeapNumber) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_AllocateHeapNumber) { + HandleScope scope(isolate); ASSERT(args.length() == 0); - return isolate->heap()->AllocateHeapNumber(0); + return *isolate->factory()->NewHeapNumber(0); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberAdd) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberAdd) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_DOUBLE_ARG_CHECKED(x, 0); CONVERT_DOUBLE_ARG_CHECKED(y, 1); - return isolate->heap()->NumberFromDouble(x + y); + return *isolate->factory()->NewNumber(x + y); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberSub) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberSub) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_DOUBLE_ARG_CHECKED(x, 0); CONVERT_DOUBLE_ARG_CHECKED(y, 1); - return isolate->heap()->NumberFromDouble(x - y); + return *isolate->factory()->NewNumber(x - y); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberMul) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberMul) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_DOUBLE_ARG_CHECKED(x, 0); CONVERT_DOUBLE_ARG_CHECKED(y, 1); - return isolate->heap()->NumberFromDouble(x * y); + return *isolate->factory()->NewNumber(x * y); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberUnaryMinus) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberUnaryMinus) { + HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_DOUBLE_ARG_CHECKED(x, 0); - return isolate->heap()->NumberFromDouble(-x); + return *isolate->factory()->NewNumber(-x); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberAlloc) { - SealHandleScope shs(isolate); - ASSERT(args.length() == 0); - - return isolate->heap()->NumberFromDouble(9876543210.0); -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberDiv) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberDiv) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_DOUBLE_ARG_CHECKED(x, 0); CONVERT_DOUBLE_ARG_CHECKED(y, 1); - return isolate->heap()->NumberFromDouble(x / y); + return *isolate->factory()->NewNumber(x / y); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberMod) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberMod) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_DOUBLE_ARG_CHECKED(x, 0); CONVERT_DOUBLE_ARG_CHECKED(y, 1); - - x = modulo(x, y); - // NumberFromDouble may return a Smi instead of a Number object - return isolate->heap()->NumberFromDouble(x); + return *isolate->factory()->NewNumber(modulo(x, y)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberImul) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberImul) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]); CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]); - return isolate->heap()->NumberFromInt32(x * y); + return *isolate->factory()->NewNumberFromInt(x * y); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_StringAdd) { +RUNTIME_FUNCTION(RuntimeHidden_StringAdd) { HandleScope scope(isolate); 
ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(String, str1, 0); CONVERT_ARG_HANDLE_CHECKED(String, str2, 1); isolate->counters()->string_add_runtime()->Increment(); - Handle<String> result = isolate->factory()->NewConsString(str1, str2); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<String> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, isolate->factory()->NewConsString(str1, str2)); return *result; } @@ -7120,6 +7169,7 @@ static inline void StringBuilderConcatHelper(String* special, sinkchar* sink, FixedArray* fixed_array, int array_length) { + DisallowHeapAllocation no_gc; int position = 0; for (int i = 0; i < array_length; i++) { Object* element = fixed_array->get(i); @@ -7154,36 +7204,13 @@ static inline void StringBuilderConcatHelper(String* special, } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderConcat) { - HandleScope scope(isolate); - ASSERT(args.length() == 3); - CONVERT_ARG_HANDLE_CHECKED(JSArray, array, 0); - if (!args[1]->IsSmi()) return isolate->ThrowInvalidStringLength(); - int array_length = args.smi_at(1); - CONVERT_ARG_HANDLE_CHECKED(String, special, 2); - - // This assumption is used by the slice encoding in one or two smis. - ASSERT(Smi::kMaxValue >= String::kMaxLength); - - JSObject::EnsureCanContainHeapObjectElements(array); - - int special_length = special->length(); - if (!array->HasFastObjectElements()) { - return isolate->Throw(isolate->heap()->illegal_argument_string()); - } - FixedArray* fixed_array = FixedArray::cast(array->elements()); - if (fixed_array->length() < array_length) { - array_length = fixed_array->length(); - } - - if (array_length == 0) { - return isolate->heap()->empty_string(); - } else if (array_length == 1) { - Object* first = fixed_array->get(0); - if (first->IsString()) return first; - } - - bool one_byte = special->HasOnlyOneByteChars(); +// Returns the result length of the concatenation. +// On illegal argument, -1 is returned. +static inline int StringBuilderConcatLength(int special_length, + FixedArray* fixed_array, + int array_length, + bool* one_byte) { + DisallowHeapAllocation no_gc; int position = 0; for (int i = 0; i < array_length; i++) { int increment = 0; @@ -7202,76 +7229,113 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderConcat) { len = -smi_value; // Get the position and check that it is a positive smi. 
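Another recurring change, visible in StringBuilderConcatHelper and the new StringBuilderConcatLength helper above: code that walks raw String* and FixedArray* pointers now opens a DisallowHeapAllocation scope first, so the GC cannot run (and move those objects) while the raw pointers are live. A minimal hypothetical helper in the same style, not part of this commit:

  // Sum the lengths of the string elements of |fixed_array| while
  // allocation (and therefore GC) is forbidden.
  static int SumOfStringLengths(FixedArray* fixed_array, int array_length) {
    DisallowHeapAllocation no_gc;
    int total = 0;
    for (int i = 0; i < array_length; i++) {
      Object* element = fixed_array->get(i);
      if (element->IsString()) total += String::cast(element)->length();
    }
    return total;
  }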
i++; - if (i >= array_length) { - return isolate->Throw(isolate->heap()->illegal_argument_string()); - } + if (i >= array_length) return -1; Object* next_smi = fixed_array->get(i); - if (!next_smi->IsSmi()) { - return isolate->Throw(isolate->heap()->illegal_argument_string()); - } + if (!next_smi->IsSmi()) return -1; pos = Smi::cast(next_smi)->value(); - if (pos < 0) { - return isolate->Throw(isolate->heap()->illegal_argument_string()); - } + if (pos < 0) return -1; } ASSERT(pos >= 0); ASSERT(len >= 0); - if (pos > special_length || len > special_length - pos) { - return isolate->Throw(isolate->heap()->illegal_argument_string()); - } + if (pos > special_length || len > special_length - pos) return -1; increment = len; } else if (elt->IsString()) { String* element = String::cast(elt); int element_length = element->length(); increment = element_length; - if (one_byte && !element->HasOnlyOneByteChars()) { - one_byte = false; + if (*one_byte && !element->HasOnlyOneByteChars()) { + *one_byte = false; } } else { - ASSERT(!elt->IsTheHole()); - return isolate->Throw(isolate->heap()->illegal_argument_string()); + return -1; } if (increment > String::kMaxLength - position) { - return isolate->ThrowInvalidStringLength(); + return kMaxInt; // Provoke throw on allocation. } position += increment; } + return position; +} - int length = position; - Object* object; - if (one_byte) { - { MaybeObject* maybe_object = - isolate->heap()->AllocateRawOneByteString(length); - if (!maybe_object->ToObject(&object)) return maybe_object; +RUNTIME_FUNCTION(Runtime_StringBuilderConcat) { + HandleScope scope(isolate); + ASSERT(args.length() == 3); + CONVERT_ARG_HANDLE_CHECKED(JSArray, array, 0); + if (!args[1]->IsSmi()) return isolate->ThrowInvalidStringLength(); + CONVERT_SMI_ARG_CHECKED(array_length, 1); + CONVERT_ARG_HANDLE_CHECKED(String, special, 2); + + size_t actual_array_length = 0; + RUNTIME_ASSERT( + TryNumberToSize(isolate, array->length(), &actual_array_length)); + RUNTIME_ASSERT(array_length >= 0); + RUNTIME_ASSERT(static_cast<size_t>(array_length) <= actual_array_length); + + // This assumption is used by the slice encoding in one or two smis. 
+ ASSERT(Smi::kMaxValue >= String::kMaxLength); + + RUNTIME_ASSERT(array->HasFastElements()); + JSObject::EnsureCanContainHeapObjectElements(array); + + int special_length = special->length(); + if (!array->HasFastObjectElements()) { + return isolate->Throw(isolate->heap()->illegal_argument_string()); + } + + int length; + bool one_byte = special->HasOnlyOneByteChars(); + + { DisallowHeapAllocation no_gc; + FixedArray* fixed_array = FixedArray::cast(array->elements()); + if (fixed_array->length() < array_length) { + array_length = fixed_array->length(); + } + + if (array_length == 0) { + return isolate->heap()->empty_string(); + } else if (array_length == 1) { + Object* first = fixed_array->get(0); + if (first->IsString()) return first; } - SeqOneByteString* answer = SeqOneByteString::cast(object); + length = StringBuilderConcatLength( + special_length, fixed_array, array_length, &one_byte); + } + + if (length == -1) { + return isolate->Throw(isolate->heap()->illegal_argument_string()); + } + + if (one_byte) { + Handle<SeqOneByteString> answer; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, answer, + isolate->factory()->NewRawOneByteString(length)); StringBuilderConcatHelper(*special, answer->GetChars(), - fixed_array, + FixedArray::cast(array->elements()), array_length); - return answer; + return *answer; } else { - { MaybeObject* maybe_object = - isolate->heap()->AllocateRawTwoByteString(length); - if (!maybe_object->ToObject(&object)) return maybe_object; - } - SeqTwoByteString* answer = SeqTwoByteString::cast(object); + Handle<SeqTwoByteString> answer; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, answer, + isolate->factory()->NewRawTwoByteString(length)); StringBuilderConcatHelper(*special, answer->GetChars(), - fixed_array, + FixedArray::cast(array->elements()), array_length); - return answer; + return *answer; } } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderJoin) { +RUNTIME_FUNCTION(Runtime_StringBuilderJoin) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(JSArray, array, 0); if (!args[1]->IsSmi()) return isolate->ThrowInvalidStringLength(); - int array_length = args.smi_at(1); + CONVERT_SMI_ARG_CHECKED(array_length, 1); CONVERT_ARG_HANDLE_CHECKED(String, separator, 2); RUNTIME_ASSERT(array->HasFastObjectElements()); @@ -7289,6 +7353,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderJoin) { } int separator_length = separator->length(); + RUNTIME_ASSERT(separator_length > 0); int max_nof_separators = (String::kMaxLength + separator_length - 1) / separator_length; if (max_nof_separators < (array_length - 1)) { @@ -7308,9 +7373,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderJoin) { length += increment; } - Handle<SeqTwoByteString> answer = - isolate->factory()->NewRawTwoByteString(length); - RETURN_IF_EMPTY_HANDLE(isolate, answer); + Handle<SeqTwoByteString> answer; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, answer, + isolate->factory()->NewRawTwoByteString(length)); DisallowHeapAllocation no_gc; @@ -7349,6 +7415,7 @@ static void JoinSparseArrayWithSeparator(FixedArray* elements, uint32_t array_length, String* separator, Vector<Char> buffer) { + DisallowHeapAllocation no_gc; int previous_separator_position = 0; int separator_length = separator->length(); int cursor = 0; @@ -7384,41 +7451,48 @@ static void JoinSparseArrayWithSeparator(FixedArray* elements, } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SparseJoinWithSeparator) { +RUNTIME_FUNCTION(Runtime_SparseJoinWithSeparator) { HandleScope scope(isolate); 
ASSERT(args.length() == 3); - CONVERT_ARG_CHECKED(JSArray, elements_array, 0); - RUNTIME_ASSERT(elements_array->HasFastSmiOrObjectElements()); + CONVERT_ARG_HANDLE_CHECKED(JSArray, elements_array, 0); CONVERT_NUMBER_CHECKED(uint32_t, array_length, Uint32, args[1]); - CONVERT_ARG_CHECKED(String, separator, 2); + CONVERT_ARG_HANDLE_CHECKED(String, separator, 2); // elements_array is fast-mode JSarray of alternating positions // (increasing order) and strings. + RUNTIME_ASSERT(elements_array->HasFastSmiOrObjectElements()); // array_length is length of original array (used to add separators); // separator is string to put between elements. Assumed to be non-empty. + RUNTIME_ASSERT(array_length > 0); // Find total length of join result. int string_length = 0; bool is_ascii = separator->IsOneByteRepresentation(); bool overflow = false; - CONVERT_NUMBER_CHECKED(int, elements_length, - Int32, elements_array->length()); + CONVERT_NUMBER_CHECKED(int, elements_length, Int32, elements_array->length()); + RUNTIME_ASSERT(elements_length <= elements_array->elements()->length()); RUNTIME_ASSERT((elements_length & 1) == 0); // Even length. FixedArray* elements = FixedArray::cast(elements_array->elements()); for (int i = 0; i < elements_length; i += 2) { RUNTIME_ASSERT(elements->get(i)->IsNumber()); RUNTIME_ASSERT(elements->get(i + 1)->IsString()); - String* string = String::cast(elements->get(i + 1)); - int length = string->length(); - if (is_ascii && !string->IsOneByteRepresentation()) { - is_ascii = false; - } - if (length > String::kMaxLength || - String::kMaxLength - length < string_length) { - overflow = true; - break; + } + + { DisallowHeapAllocation no_gc; + for (int i = 0; i < elements_length; i += 2) { + String* string = String::cast(elements->get(i + 1)); + int length = string->length(); + if (is_ascii && !string->IsOneByteRepresentation()) { + is_ascii = false; + } + if (length > String::kMaxLength || + String::kMaxLength - length < string_length) { + overflow = true; + break; + } + string_length += length; } - string_length += length; } + int separator_length = separator->length(); if (!overflow && separator_length > 0) { if (array_length <= 0x7fffffffu) { @@ -7445,97 +7519,91 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SparseJoinWithSeparator) { } if (is_ascii) { - MaybeObject* result_allocation = - isolate->heap()->AllocateRawOneByteString(string_length); - if (result_allocation->IsFailure()) return result_allocation; - SeqOneByteString* result_string = - SeqOneByteString::cast(result_allocation->ToObjectUnchecked()); - JoinSparseArrayWithSeparator<uint8_t>(elements, - elements_length, - array_length, - separator, - Vector<uint8_t>( - result_string->GetChars(), - string_length)); - return result_string; + Handle<SeqOneByteString> result = isolate->factory()->NewRawOneByteString( + string_length).ToHandleChecked(); + JoinSparseArrayWithSeparator<uint8_t>( + FixedArray::cast(elements_array->elements()), + elements_length, + array_length, + *separator, + Vector<uint8_t>(result->GetChars(), string_length)); + return *result; } else { - MaybeObject* result_allocation = - isolate->heap()->AllocateRawTwoByteString(string_length); - if (result_allocation->IsFailure()) return result_allocation; - SeqTwoByteString* result_string = - SeqTwoByteString::cast(result_allocation->ToObjectUnchecked()); - JoinSparseArrayWithSeparator<uc16>(elements, - elements_length, - array_length, - separator, - Vector<uc16>(result_string->GetChars(), - string_length)); - return result_string; + Handle<SeqTwoByteString> 
result = isolate->factory()->NewRawTwoByteString( + string_length).ToHandleChecked(); + JoinSparseArrayWithSeparator<uc16>( + FixedArray::cast(elements_array->elements()), + elements_length, + array_length, + *separator, + Vector<uc16>(result->GetChars(), string_length)); + return *result; } } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberOr) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberOr) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]); CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]); - return isolate->heap()->NumberFromInt32(x | y); + return *isolate->factory()->NewNumberFromInt(x | y); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberAnd) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberAnd) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]); CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]); - return isolate->heap()->NumberFromInt32(x & y); + return *isolate->factory()->NewNumberFromInt(x & y); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberXor) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberXor) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]); CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]); - return isolate->heap()->NumberFromInt32(x ^ y); + return *isolate->factory()->NewNumberFromInt(x ^ y); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberShl) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberShl) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]); CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]); - return isolate->heap()->NumberFromInt32(x << (y & 0x1f)); + return *isolate->factory()->NewNumberFromInt(x << (y & 0x1f)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberShr) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberShr) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_NUMBER_CHECKED(uint32_t, x, Uint32, args[0]); CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]); - return isolate->heap()->NumberFromUint32(x >> (y & 0x1f)); + return *isolate->factory()->NewNumberFromUint(x >> (y & 0x1f)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberSar) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_NumberSar) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]); CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]); - return isolate->heap()->NumberFromInt32(ArithmeticShiftRight(x, y & 0x1f)); + return *isolate->factory()->NewNumberFromInt( + ArithmeticShiftRight(x, y & 0x1f)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberEquals) { +RUNTIME_FUNCTION(Runtime_NumberEquals) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); @@ -7554,14 +7622,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberEquals) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringEquals) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_StringEquals) { + HandleScope handle_scope(isolate); ASSERT(args.length() == 2); - CONVERT_ARG_CHECKED(String, x, 0); - CONVERT_ARG_CHECKED(String, y, 1); + CONVERT_ARG_HANDLE_CHECKED(String, x, 0); + CONVERT_ARG_HANDLE_CHECKED(String, y, 1); - bool not_equal = !x->Equals(y); + bool not_equal = !String::Equals(x, y); // This is slightly convoluted because the value that signifies // equality is 0 and inequality is 1 so we have to negate the 
result // from String::Equals. @@ -7572,13 +7640,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringEquals) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberCompare) { +RUNTIME_FUNCTION(Runtime_NumberCompare) { SealHandleScope shs(isolate); ASSERT(args.length() == 3); CONVERT_DOUBLE_ARG_CHECKED(x, 0); CONVERT_DOUBLE_ARG_CHECKED(y, 1); - if (std::isnan(x) || std::isnan(y)) return args[2]; + CONVERT_ARG_HANDLE_CHECKED(Object, uncomparable_result, 2) + if (std::isnan(x) || std::isnan(y)) return *uncomparable_result; if (x == y) return Smi::FromInt(EQUAL); if (isless(x, y)) return Smi::FromInt(LESS); return Smi::FromInt(GREATER); @@ -7587,7 +7656,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberCompare) { // Compare two Smis as if they were converted to strings and then // compared lexicographically. -RUNTIME_FUNCTION(MaybeObject*, Runtime_SmiLexicographicCompare) { +RUNTIME_FUNCTION(Runtime_SmiLexicographicCompare) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); CONVERT_SMI_ARG_CHECKED(x_value, 0); @@ -7662,27 +7731,33 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SmiLexicographicCompare) { } -static Object* StringCharacterStreamCompare(RuntimeState* state, - String* x, - String* y) { - StringCharacterStream stream_x(x, state->string_iterator_compare_x()); - StringCharacterStream stream_y(y, state->string_iterator_compare_y()); - while (stream_x.HasMore() && stream_y.HasMore()) { - int d = stream_x.GetNext() - stream_y.GetNext(); - if (d < 0) return Smi::FromInt(LESS); - else if (d > 0) return Smi::FromInt(GREATER); +RUNTIME_FUNCTION(RuntimeHidden_StringCompare) { + HandleScope handle_scope(isolate); + ASSERT(args.length() == 2); + + CONVERT_ARG_HANDLE_CHECKED(String, x, 0); + CONVERT_ARG_HANDLE_CHECKED(String, y, 1); + + isolate->counters()->string_compare_runtime()->Increment(); + + // A few fast case tests before we flatten. + if (x.is_identical_to(y)) return Smi::FromInt(EQUAL); + if (y->length() == 0) { + if (x->length() == 0) return Smi::FromInt(EQUAL); + return Smi::FromInt(GREATER); + } else if (x->length() == 0) { + return Smi::FromInt(LESS); } - // x is (non-trivial) prefix of y: - if (stream_y.HasMore()) return Smi::FromInt(LESS); - // y is prefix of x: - return Smi::FromInt(stream_x.HasMore() ? GREATER : EQUAL); -} + int d = x->Get(0) - y->Get(0); + if (d < 0) return Smi::FromInt(LESS); + else if (d > 0) return Smi::FromInt(GREATER); + // Slow case. + x = String::Flatten(x); + y = String::Flatten(y); -static Object* FlatStringCompare(String* x, String* y) { - ASSERT(x->IsFlat()); - ASSERT(y->IsFlat()); + DisallowHeapAllocation no_gc; Object* equal_prefix_result = Smi::FromInt(EQUAL); int prefix_length = x->length(); if (y->length() < prefix_length) { @@ -7692,7 +7767,6 @@ static Object* FlatStringCompare(String* x, String* y) { equal_prefix_result = Smi::FromInt(LESS); } int r; - DisallowHeapAllocation no_gc; String::FlatContent x_content = x->GetFlatContent(); String::FlatContent y_content = y->GetFlatContent(); if (x_content.IsAscii()) { @@ -7720,97 +7794,60 @@ static Object* FlatStringCompare(String* x, String* y) { } else { result = (r < 0) ? 
Smi::FromInt(LESS) : Smi::FromInt(GREATER); } - ASSERT(result == - StringCharacterStreamCompare(x->GetIsolate()->runtime_state(), x, y)); return result; } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_StringCompare) { - SealHandleScope shs(isolate); - ASSERT(args.length() == 2); - - CONVERT_ARG_CHECKED(String, x, 0); - CONVERT_ARG_CHECKED(String, y, 1); - - isolate->counters()->string_compare_runtime()->Increment(); - - // A few fast case tests before we flatten. - if (x == y) return Smi::FromInt(EQUAL); - if (y->length() == 0) { - if (x->length() == 0) return Smi::FromInt(EQUAL); - return Smi::FromInt(GREATER); - } else if (x->length() == 0) { - return Smi::FromInt(LESS); - } - - int d = x->Get(0) - y->Get(0); - if (d < 0) return Smi::FromInt(LESS); - else if (d > 0) return Smi::FromInt(GREATER); - - Object* obj; - { MaybeObject* maybe_obj = isolate->heap()->PrepareForCompare(x); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - { MaybeObject* maybe_obj = isolate->heap()->PrepareForCompare(y); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - - return (x->IsFlat() && y->IsFlat()) ? FlatStringCompare(x, y) - : StringCharacterStreamCompare(isolate->runtime_state(), x, y); -} - - -#define RUNTIME_UNARY_MATH(NAME) \ -RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_##NAME) { \ - SealHandleScope shs(isolate); \ +#define RUNTIME_UNARY_MATH(Name, name) \ +RUNTIME_FUNCTION(Runtime_Math##Name) { \ + HandleScope scope(isolate); \ ASSERT(args.length() == 1); \ - isolate->counters()->math_##NAME()->Increment(); \ + isolate->counters()->math_##name()->Increment(); \ CONVERT_DOUBLE_ARG_CHECKED(x, 0); \ - return isolate->heap()->AllocateHeapNumber(std::NAME(x)); \ + return *isolate->factory()->NewHeapNumber(std::name(x)); \ } -RUNTIME_UNARY_MATH(acos) -RUNTIME_UNARY_MATH(asin) -RUNTIME_UNARY_MATH(atan) -RUNTIME_UNARY_MATH(log) +RUNTIME_UNARY_MATH(Acos, acos) +RUNTIME_UNARY_MATH(Asin, asin) +RUNTIME_UNARY_MATH(Atan, atan) +RUNTIME_UNARY_MATH(Log, log) #undef RUNTIME_UNARY_MATH -RUNTIME_FUNCTION(MaybeObject*, Runtime_DoubleHi) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_DoubleHi) { + HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_DOUBLE_ARG_CHECKED(x, 0); uint64_t integer = double_to_uint64(x); integer = (integer >> 32) & 0xFFFFFFFFu; - return isolate->heap()->NumberFromDouble(static_cast<int32_t>(integer)); + return *isolate->factory()->NewNumber(static_cast<int32_t>(integer)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DoubleLo) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_DoubleLo) { + HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_DOUBLE_ARG_CHECKED(x, 0); - return isolate->heap()->NumberFromDouble( + return *isolate->factory()->NewNumber( static_cast<int32_t>(double_to_uint64(x) & 0xFFFFFFFFu)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ConstructDouble) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_ConstructDouble) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_NUMBER_CHECKED(uint32_t, hi, Uint32, args[0]); CONVERT_NUMBER_CHECKED(uint32_t, lo, Uint32, args[1]); uint64_t result = (static_cast<uint64_t>(hi) << 32) | lo; - return isolate->heap()->AllocateHeapNumber(uint64_to_double(result)); + return *isolate->factory()->NewNumber(uint64_to_double(result)); } static const double kPiDividedBy4 = 0.78539816339744830962; -RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_atan2) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_MathAtan2) { + HandleScope scope(isolate); ASSERT(args.length() == 
2); isolate->counters()->math_atan2()->Increment(); @@ -7828,35 +7865,35 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_atan2) { } else { result = std::atan2(x, y); } - return isolate->heap()->AllocateHeapNumber(result); + return *isolate->factory()->NewNumber(result); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_exp) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_MathExp) { + HandleScope scope(isolate); ASSERT(args.length() == 1); isolate->counters()->math_exp()->Increment(); CONVERT_DOUBLE_ARG_CHECKED(x, 0); lazily_initialize_fast_exp(); - return isolate->heap()->NumberFromDouble(fast_exp(x)); + return *isolate->factory()->NewNumber(fast_exp(x)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_floor) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_MathFloor) { + HandleScope scope(isolate); ASSERT(args.length() == 1); isolate->counters()->math_floor()->Increment(); CONVERT_DOUBLE_ARG_CHECKED(x, 0); - return isolate->heap()->NumberFromDouble(std::floor(x)); + return *isolate->factory()->NewNumber(std::floor(x)); } // Slow version of Math.pow. We check for fast paths for special cases. // Used if SSE2/VFP3 is not available. -RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_pow) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_MathPowSlow) { + HandleScope scope(isolate); ASSERT(args.length() == 2); isolate->counters()->math_pow()->Increment(); @@ -7866,20 +7903,20 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_pow) { // custom powi() function than the generic pow(). if (args[1]->IsSmi()) { int y = args.smi_at(1); - return isolate->heap()->NumberFromDouble(power_double_int(x, y)); + return *isolate->factory()->NewNumber(power_double_int(x, y)); } CONVERT_DOUBLE_ARG_CHECKED(y, 1); double result = power_helper(x, y); if (std::isnan(result)) return isolate->heap()->nan_value(); - return isolate->heap()->AllocateHeapNumber(result); + return *isolate->factory()->NewNumber(result); } // Fast version of Math.pow if we know that y is not an integer and y is not // -0.5 or 0.5. Used as slow case from full codegen. -RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_pow_cfunction) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_MathPow) { + HandleScope scope(isolate); ASSERT(args.length() == 2); isolate->counters()->math_pow()->Increment(); @@ -7890,23 +7927,23 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_pow_cfunction) { } else { double result = power_double_double(x, y); if (std::isnan(result)) return isolate->heap()->nan_value(); - return isolate->heap()->AllocateHeapNumber(result); + return *isolate->factory()->NewNumber(result); } } -RUNTIME_FUNCTION(MaybeObject*, Runtime_RoundNumber) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_RoundNumber) { + HandleScope scope(isolate); ASSERT(args.length() == 1); + CONVERT_NUMBER_ARG_HANDLE_CHECKED(input, 0); isolate->counters()->math_round()->Increment(); - if (!args[0]->IsHeapNumber()) { - // Must be smi. Return the argument unchanged for all the other types - // to make fuzz-natives test happy. - return args[0]; + if (!input->IsHeapNumber()) { + ASSERT(input->IsSmi()); + return *input; } - HeapNumber* number = reinterpret_cast<HeapNumber*>(args[0]); + Handle<HeapNumber> number = Handle<HeapNumber>::cast(input); double value = number->value(); int exponent = number->get_exponent(); @@ -7928,48 +7965,50 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RoundNumber) { // If the magnitude is big enough, there's no place for fraction part. If we // try to add 0.5 to this number, 1.0 will be added instead. 
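For reference, the renamed two-parameter RUNTIME_UNARY_MATH macro shown earlier expands mechanically; RUNTIME_UNARY_MATH(Log, log) becomes, modulo whitespace:

  RUNTIME_FUNCTION(Runtime_MathLog) {
    HandleScope scope(isolate);
    ASSERT(args.length() == 1);
    isolate->counters()->math_log()->Increment();
    CONVERT_DOUBLE_ARG_CHECKED(x, 0);
    return *isolate->factory()->NewHeapNumber(std::log(x));
  }

which is the same HandleScope-plus-factory pattern used by the hand-written math runtime functions in this hunk.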
if (exponent >= 52) { - return number; + return *number; } if (sign && value >= -0.5) return isolate->heap()->minus_zero_value(); // Do not call NumberFromDouble() to avoid extra checks. - return isolate->heap()->AllocateHeapNumber(std::floor(value + 0.5)); + return *isolate->factory()->NewNumber(std::floor(value + 0.5)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_sqrt) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_MathSqrt) { + HandleScope scope(isolate); ASSERT(args.length() == 1); isolate->counters()->math_sqrt()->Increment(); CONVERT_DOUBLE_ARG_CHECKED(x, 0); - return isolate->heap()->AllocateHeapNumber(fast_sqrt(x)); + return *isolate->factory()->NewNumber(fast_sqrt(x)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_fround) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_MathFround) { + HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_DOUBLE_ARG_CHECKED(x, 0); float xf = static_cast<float>(x); - return isolate->heap()->AllocateHeapNumber(xf); + return *isolate->factory()->NewNumber(xf); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DateMakeDay) { +RUNTIME_FUNCTION(Runtime_DateMakeDay) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); CONVERT_SMI_ARG_CHECKED(year, 0); CONVERT_SMI_ARG_CHECKED(month, 1); - return Smi::FromInt(isolate->date_cache()->DaysFromYearMonth(year, month)); + int days = isolate->date_cache()->DaysFromYearMonth(year, month); + RUNTIME_ASSERT(Smi::IsValid(days)); + return Smi::FromInt(days); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DateSetValue) { +RUNTIME_FUNCTION(Runtime_DateSetValue) { HandleScope scope(isolate); ASSERT(args.length() == 3); @@ -7979,40 +8018,38 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateSetValue) { DateCache* date_cache = isolate->date_cache(); - Object* value = NULL; + Handle<Object> value;; bool is_value_nan = false; if (std::isnan(time)) { - value = isolate->heap()->nan_value(); + value = isolate->factory()->nan_value(); is_value_nan = true; } else if (!is_utc && (time < -DateCache::kMaxTimeBeforeUTCInMs || time > DateCache::kMaxTimeBeforeUTCInMs)) { - value = isolate->heap()->nan_value(); + value = isolate->factory()->nan_value(); is_value_nan = true; } else { time = is_utc ? 
time : date_cache->ToUTC(static_cast<int64_t>(time)); if (time < -DateCache::kMaxTimeInMs || time > DateCache::kMaxTimeInMs) { - value = isolate->heap()->nan_value(); + value = isolate->factory()->nan_value(); is_value_nan = true; } else { - MaybeObject* maybe_result = - isolate->heap()->AllocateHeapNumber(DoubleToInteger(time)); - if (!maybe_result->ToObject(&value)) return maybe_result; + value = isolate->factory()->NewNumber(DoubleToInteger(time)); } } - date->SetValue(value, is_value_nan); - return value; + date->SetValue(*value, is_value_nan); + return *value; } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewArgumentsFast) { +RUNTIME_FUNCTION(RuntimeHidden_NewArgumentsFast) { HandleScope scope(isolate); ASSERT(args.length() == 3); - Handle<JSFunction> callee = args.at<JSFunction>(0); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, callee, 0); Object** parameters = reinterpret_cast<Object**>(args[1]); - const int argument_count = Smi::cast(args[2])->value(); + CONVERT_SMI_ARG_CHECKED(argument_count, 2); Handle<JSObject> result = isolate->factory()->NewArgumentsObject(callee, argument_count); @@ -8026,11 +8063,10 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewArgumentsFast) { parameter_map->set_map( isolate->heap()->sloppy_arguments_elements_map()); - Handle<Map> old_map(result->map()); - Handle<Map> new_map = isolate->factory()->CopyMap(old_map); - new_map->set_elements_kind(SLOPPY_ARGUMENTS_ELEMENTS); + Handle<Map> map = Map::Copy(handle(result->map())); + map->set_elements_kind(SLOPPY_ARGUMENTS_ELEMENTS); - result->set_map(*new_map); + result->set_map(*map); result->set_elements(*parameter_map); // Store the context and the arguments array at the beginning of the @@ -8101,54 +8137,42 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewArgumentsFast) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewStrictArgumentsFast) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_NewStrictArgumentsFast) { + HandleScope scope(isolate); ASSERT(args.length() == 3); - - JSFunction* callee = JSFunction::cast(args[0]); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, callee, 0) Object** parameters = reinterpret_cast<Object**>(args[1]); - const int length = args.smi_at(2); + CONVERT_SMI_ARG_CHECKED(length, 2); - Object* result; - { MaybeObject* maybe_result = - isolate->heap()->AllocateArgumentsObject(callee, length); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - // Allocate the elements if needed. - if (length > 0) { - // Allocate the fixed array. 
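Argument extraction is tightened in the same pass: unchecked args.at<T>() and Smi::cast() reads become CONVERT_* macros, which verify the argument's type before use and fail the runtime call otherwise. The NewArgumentsFast hunk above reduces to:

  // Before: the caller's word is taken for the argument types.
  Handle<JSFunction> callee = args.at<JSFunction>(0);
  const int argument_count = Smi::cast(args[2])->value();

  // After: type-checked extraction.
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, callee, 0);
  CONVERT_SMI_ARG_CHECKED(argument_count, 2);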
- FixedArray* array; - { MaybeObject* maybe_obj = - isolate->heap()->AllocateUninitializedFixedArray(length); - if (!maybe_obj->To(&array)) return maybe_obj; - } + Handle<JSObject> result = + isolate->factory()->NewArgumentsObject(callee, length); + if (length > 0) { + Handle<FixedArray> array = + isolate->factory()->NewUninitializedFixedArray(length); DisallowHeapAllocation no_gc; WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc); for (int i = 0; i < length; i++) { array->set(i, *--parameters, mode); } - JSObject::cast(result)->set_elements(array); + result->set_elements(*array); } - return result; + return *result; } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewClosureFromStubFailure) { +RUNTIME_FUNCTION(RuntimeHidden_NewClosureFromStubFailure) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(SharedFunctionInfo, shared, 0); Handle<Context> context(isolate->context()); PretenureFlag pretenure_flag = NOT_TENURED; - Handle<JSFunction> result = - isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, - context, - pretenure_flag); - return *result; + return *isolate->factory()->NewFunctionFromSharedFunctionInfo( + shared, context, pretenure_flag); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewClosure) { +RUNTIME_FUNCTION(RuntimeHidden_NewClosure) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(Context, context, 0); @@ -8158,11 +8182,8 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewClosure) { // The caller ensures that we pretenure closures that are assigned // directly to properties. PretenureFlag pretenure_flag = pretenure ? TENURED : NOT_TENURED; - Handle<JSFunction> result = - isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, - context, - pretenure_flag); - return *result; + return *isolate->factory()->NewFunctionFromSharedFunctionInfo( + shared, context, pretenure_flag); } @@ -8216,7 +8237,7 @@ static SmartArrayPointer<Handle<Object> > GetCallerArguments( } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionBindArguments) { +RUNTIME_FUNCTION(Runtime_FunctionBindArguments) { HandleScope scope(isolate); ASSERT(args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(JSFunction, bound_function, 0); @@ -8231,10 +8252,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionBindArguments) { GetCallerArguments(isolate, 0, &argc); // Don't count the this-arg. if (argc > 0) { - ASSERT(*arguments[0] == args[2]); + RUNTIME_ASSERT(*arguments[0] == args[2]); argc--; } else { - ASSERT(args[2]->IsUndefined()); + RUNTIME_ASSERT(args[2]->IsUndefined()); } // Initialize array of bindings (function, this, and any existing arguments // if the function was already bound). 
@@ -8243,6 +8264,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionBindArguments) { if (bindee->IsJSFunction() && JSFunction::cast(*bindee)->shared()->bound()) { Handle<FixedArray> old_bindings( JSFunction::cast(*bindee)->function_bindings()); + RUNTIME_ASSERT(old_bindings->length() > JSFunction::kBoundFunctionIndex); new_bindings = isolate->factory()->NewFixedArray(old_bindings->length() + argc); bindee = Handle<Object>(old_bindings->get(JSFunction::kBoundFunctionIndex), @@ -8271,12 +8293,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionBindArguments) { Handle<Object> new_length(args.at<Object>(3)); PropertyAttributes attr = static_cast<PropertyAttributes>(DONT_DELETE | DONT_ENUM | READ_ONLY); - ForceSetProperty(bound_function, length_string, new_length, attr); + Runtime::ForceSetObjectProperty( + bound_function, length_string, new_length, attr).Assert(); return *bound_function; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_BoundFunctionGetBindings) { +RUNTIME_FUNCTION(Runtime_BoundFunctionGetBindings) { HandleScope handles(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSReceiver, callable, 0); @@ -8284,7 +8307,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_BoundFunctionGetBindings) { Handle<JSFunction> function = Handle<JSFunction>::cast(callable); if (function->shared()->bound()) { Handle<FixedArray> bindings(function->function_bindings()); - ASSERT(bindings->map() == isolate->heap()->fixed_cow_array_map()); + RUNTIME_ASSERT(bindings->map() == isolate->heap()->fixed_cow_array_map()); return *isolate->factory()->NewJSArrayWithElements(bindings); } } @@ -8292,7 +8315,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_BoundFunctionGetBindings) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObjectFromBound) { +RUNTIME_FUNCTION(Runtime_NewObjectFromBound) { HandleScope scope(isolate); ASSERT(args.length() == 1); // First argument is a function to use as a constructor. @@ -8319,27 +8342,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObjectFromBound) { } if (!bound_function->IsJSFunction()) { - bool exception_thrown; - bound_function = Execution::TryGetConstructorDelegate(isolate, - bound_function, - &exception_thrown); - if (exception_thrown) return Failure::Exception(); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, bound_function, + Execution::TryGetConstructorDelegate(isolate, bound_function)); } ASSERT(bound_function->IsJSFunction()); - bool exception = false; - Handle<Object> result = + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, Execution::New(Handle<JSFunction>::cast(bound_function), - total_argc, param_data.get(), &exception); - if (exception) { - return Failure::Exception(); - } - ASSERT(!result.is_null()); + total_argc, param_data.get())); return *result; } -static MaybeObject* Runtime_NewObjectHelper(Isolate* isolate, +static Object* Runtime_NewObjectHelper(Isolate* isolate, Handle<Object> constructor, Handle<AllocationSite> site) { // If the constructor isn't a proper function we throw a type error. @@ -8361,13 +8379,11 @@ static MaybeObject* Runtime_NewObjectHelper(Isolate* isolate, return isolate->Throw(*type_error); } -#ifdef ENABLE_DEBUGGER_SUPPORT Debug* debug = isolate->debug(); // Handle stepping into constructors if step into is active. 
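The Execution::* entry points used here no longer take a bool* out-parameter for exceptions; they report failure through the empty-handle (MaybeHandle) convention and are unpacked with the same macro as the factory calls. Condensed from the Runtime_NewObjectFromBound hunk above:

  // Before: a separate flag carried the "it threw" signal.
  bool exception = false;
  Handle<Object> result = Execution::New(
      Handle<JSFunction>::cast(bound_function),
      total_argc, param_data.get(), &exception);
  if (exception) return Failure::Exception();

  // After: an empty result means an exception is pending on the isolate.
  Handle<Object> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, result,
      Execution::New(Handle<JSFunction>::cast(bound_function),
                     total_argc, param_data.get()));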
if (debug->StepInActive()) { debug->HandleStepIn(function, Handle<Object>::null(), 0, true); } -#endif if (function->has_initial_map()) { if (function->initial_map()->instance_type() == JS_FUNCTION_TYPE) { @@ -8405,7 +8421,6 @@ static MaybeObject* Runtime_NewObjectHelper(Isolate* isolate, } else { result = isolate->factory()->NewJSObjectWithMemento(function, site); } - RETURN_IF_EMPTY_HANDLE(isolate, result); isolate->counters()->constructed_objects()->Increment(); isolate->counters()->constructed_objects_runtime()->Increment(); @@ -8414,35 +8429,31 @@ static MaybeObject* Runtime_NewObjectHelper(Isolate* isolate, } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewObject) { +RUNTIME_FUNCTION(RuntimeHidden_NewObject) { HandleScope scope(isolate); ASSERT(args.length() == 1); - - Handle<Object> constructor = args.at<Object>(0); + CONVERT_ARG_HANDLE_CHECKED(Object, constructor, 0); return Runtime_NewObjectHelper(isolate, constructor, Handle<AllocationSite>::null()); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewObjectWithAllocationSite) { +RUNTIME_FUNCTION(RuntimeHidden_NewObjectWithAllocationSite) { HandleScope scope(isolate); ASSERT(args.length() == 2); - - Handle<Object> constructor = args.at<Object>(1); - Handle<Object> feedback = args.at<Object>(0); + CONVERT_ARG_HANDLE_CHECKED(Object, constructor, 1); + CONVERT_ARG_HANDLE_CHECKED(Object, feedback, 0); Handle<AllocationSite> site; if (feedback->IsAllocationSite()) { // The feedback can be an AllocationSite or undefined. site = Handle<AllocationSite>::cast(feedback); } - return Runtime_NewObjectHelper(isolate, - constructor, - site); + return Runtime_NewObjectHelper(isolate, constructor, site); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_FinalizeInstanceSize) { +RUNTIME_FUNCTION(RuntimeHidden_FinalizeInstanceSize) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -8453,11 +8464,10 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_FinalizeInstanceSize) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CompileUnoptimized) { +RUNTIME_FUNCTION(RuntimeHidden_CompileUnoptimized) { HandleScope scope(isolate); ASSERT(args.length() == 1); - - Handle<JSFunction> function = args.at<JSFunction>(0); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); #ifdef DEBUG if (FLAG_trace_lazy && !function->shared()->is_compiled()) { PrintF("[unoptimized: "); @@ -8469,8 +8479,9 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CompileUnoptimized) { // Compile the target function. ASSERT(function->shared()->allows_lazy_compilation()); - Handle<Code> code = Compiler::GetUnoptimizedCode(function); - RETURN_IF_EMPTY_HANDLE(isolate, code); + Handle<Code> code; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, code, + Compiler::GetUnoptimizedCode(function)); function->ReplaceCode(*code); // All done. Return the compiled code. @@ -8482,10 +8493,10 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CompileUnoptimized) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CompileOptimized) { +RUNTIME_FUNCTION(RuntimeHidden_CompileOptimized) { HandleScope scope(isolate); ASSERT(args.length() == 2); - Handle<JSFunction> function = args.at<JSFunction>(0); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); CONVERT_BOOLEAN_ARG_CHECKED(concurrent, 1); Handle<Code> unoptimized(function->shared()->code()); @@ -8511,8 +8522,13 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_CompileOptimized) { } else { Compiler::ConcurrencyMode mode = concurrent ? 
Compiler::CONCURRENT : Compiler::NOT_CONCURRENT; - Handle<Code> code = Compiler::GetOptimizedCode(function, unoptimized, mode); - function->ReplaceCode(code.is_null() ? *unoptimized : *code); + Handle<Code> code; + if (Compiler::GetOptimizedCode( + function, unoptimized, mode).ToHandle(&code)) { + function->ReplaceCode(*code); + } else { + function->ReplaceCode(*unoptimized); + } } ASSERT(function->code()->kind() == Code::FUNCTION || @@ -8545,7 +8561,7 @@ class ActivationsFinder : public ThreadVisitor { }; -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NotifyStubFailure) { +RUNTIME_FUNCTION(RuntimeHidden_NotifyStubFailure) { HandleScope scope(isolate); ASSERT(args.length() == 0); Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate); @@ -8555,12 +8571,12 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NotifyStubFailure) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NotifyDeoptimized) { +RUNTIME_FUNCTION(RuntimeHidden_NotifyDeoptimized) { HandleScope scope(isolate); ASSERT(args.length() == 1); - RUNTIME_ASSERT(args[0]->IsSmi()); + CONVERT_SMI_ARG_CHECKED(type_arg, 0); Deoptimizer::BailoutType type = - static_cast<Deoptimizer::BailoutType>(args.smi_at(0)); + static_cast<Deoptimizer::BailoutType>(type_arg); Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate); ASSERT(AllowHeapAllocation::IsAllowed()); @@ -8614,7 +8630,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NotifyDeoptimized) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DeoptimizeFunction) { +RUNTIME_FUNCTION(Runtime_DeoptimizeFunction) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); @@ -8626,21 +8642,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeoptimizeFunction) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ClearFunctionTypeFeedback) { +RUNTIME_FUNCTION(Runtime_ClearFunctionTypeFeedback) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); + function->shared()->ClearTypeFeedbackInfo(); Code* unoptimized = function->shared()->code(); if (unoptimized->kind() == Code::FUNCTION) { unoptimized->ClearInlineCaches(); - unoptimized->ClearTypeFeedbackInfo(isolate->heap()); } return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_RunningInSimulator) { +RUNTIME_FUNCTION(Runtime_RunningInSimulator) { SealHandleScope shs(isolate); + ASSERT(args.length() == 0); #if defined(USE_SIMULATOR) return isolate->heap()->true_value(); #else @@ -8649,14 +8666,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RunningInSimulator) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsConcurrentRecompilationSupported) { - HandleScope scope(isolate); - return isolate->concurrent_recompilation_enabled() - ? 
isolate->heap()->true_value() : isolate->heap()->false_value(); +RUNTIME_FUNCTION(Runtime_IsConcurrentRecompilationSupported) { + SealHandleScope shs(isolate); + ASSERT(args.length() == 0); + return isolate->heap()->ToBoolean( + isolate->concurrent_recompilation_enabled()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_OptimizeFunctionOnNextCall) { +RUNTIME_FUNCTION(Runtime_OptimizeFunctionOnNextCall) { HandleScope scope(isolate); RUNTIME_ASSERT(args.length() == 1 || args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); @@ -8691,7 +8709,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_OptimizeFunctionOnNextCall) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NeverOptimizeFunction) { +RUNTIME_FUNCTION(Runtime_NeverOptimizeFunction) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSFunction, function, 0); @@ -8700,7 +8718,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NeverOptimizeFunction) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationStatus) { +RUNTIME_FUNCTION(Runtime_GetOptimizationStatus) { HandleScope scope(isolate); RUNTIME_ASSERT(args.length() == 1 || args.length() == 2); if (!isolate->use_crankshaft()) { @@ -8735,7 +8753,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationStatus) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_UnblockConcurrentRecompilation) { +RUNTIME_FUNCTION(Runtime_UnblockConcurrentRecompilation) { + ASSERT(args.length() == 0); RUNTIME_ASSERT(FLAG_block_concurrent_recompilation); RUNTIME_ASSERT(isolate->concurrent_recompilation_enabled()); isolate->optimizing_compiler_thread()->Unblock(); @@ -8743,7 +8762,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_UnblockConcurrentRecompilation) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationCount) { +RUNTIME_FUNCTION(Runtime_GetOptimizationCount) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); @@ -8770,7 +8789,7 @@ static bool IsSuitableForOnStackReplacement(Isolate* isolate, } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) { +RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); @@ -8831,15 +8850,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) { PrintF(" at AST id %d]\n", ast_id.ToInt()); } result = Compiler::GetConcurrentlyOptimizedCode(job); - } else if (result.is_null() && - IsSuitableForOnStackReplacement(isolate, function, caller_code)) { + } else if (IsSuitableForOnStackReplacement(isolate, function, caller_code)) { if (FLAG_trace_osr) { PrintF("[OSR - Compiling: "); function->PrintName(); PrintF(" at AST id %d]\n", ast_id.ToInt()); } - result = Compiler::GetOptimizedCode(function, caller_code, mode, ast_id); - if (result.is_identical_to(isolate->builtins()->InOptimizationQueue())) { + MaybeHandle<Code> maybe_result = Compiler::GetOptimizedCode( + function, caller_code, mode, ast_id); + if (maybe_result.ToHandle(&result) && + result.is_identical_to(isolate->builtins()->InOptimizationQueue())) { // Optimization is queued. Return to check later. 
return NULL; } @@ -8883,7 +8903,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAllocationTimeout) { +RUNTIME_FUNCTION(Runtime_SetAllocationTimeout) { SealHandleScope shs(isolate); ASSERT(args.length() == 2 || args.length() == 3); #ifdef DEBUG @@ -8905,21 +8925,23 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAllocationTimeout) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CheckIsBootstrapping) { +RUNTIME_FUNCTION(Runtime_CheckIsBootstrapping) { SealHandleScope shs(isolate); + ASSERT(args.length() == 0); RUNTIME_ASSERT(isolate->bootstrapper()->IsActive()); return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetRootNaN) { +RUNTIME_FUNCTION(Runtime_GetRootNaN) { SealHandleScope shs(isolate); + ASSERT(args.length() == 0); RUNTIME_ASSERT(isolate->bootstrapper()->IsActive()); return isolate->heap()->nan_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_Call) { +RUNTIME_FUNCTION(Runtime_Call) { HandleScope scope(isolate); ASSERT(args.length() >= 2); int argc = args.length() - 2; @@ -8938,33 +8960,32 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Call) { } for (int i = 0; i < argc; ++i) { - MaybeObject* maybe = args[1 + i]; - Object* object; - if (!maybe->To<Object>(&object)) return maybe; - argv[i] = Handle<Object>(object, isolate); + argv[i] = Handle<Object>(args[1 + i], isolate); } - bool threw; Handle<JSReceiver> hfun(fun); Handle<Object> hreceiver(receiver, isolate); - Handle<Object> result = Execution::Call( - isolate, hfun, hreceiver, argc, argv, &threw, true); - - if (threw) return Failure::Exception(); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Execution::Call(isolate, hfun, hreceiver, argc, argv, true)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_Apply) { +RUNTIME_FUNCTION(Runtime_Apply) { HandleScope scope(isolate); ASSERT(args.length() == 5); CONVERT_ARG_HANDLE_CHECKED(JSReceiver, fun, 0); - Handle<Object> receiver = args.at<Object>(1); + CONVERT_ARG_HANDLE_CHECKED(Object, receiver, 1); CONVERT_ARG_HANDLE_CHECKED(JSObject, arguments, 2); CONVERT_SMI_ARG_CHECKED(offset, 3); CONVERT_SMI_ARG_CHECKED(argc, 4); RUNTIME_ASSERT(offset >= 0); - RUNTIME_ASSERT(argc >= 0); + // Loose upper bound to allow fuzzing. We'll most likely run out of + // stack space before hitting this limit. + static int kMaxArgc = 1000000; + RUNTIME_ASSERT(argc >= 0 && argc <= kMaxArgc); // If there are too many arguments, allocate argv via malloc. 
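Several of these functions are also hardened against hostile (fuzzed) inputs: debug-only ASSERTs on caller-supplied values become RUNTIME_ASSERTs, which are still checked in release builds and fail the call rather than assuming the condition, and Runtime_Apply gains an explicit upper bound on argc. The two shapes, taken from the FunctionBindArguments and Apply hunks:

  // ASSERT is compiled out of release builds; RUNTIME_ASSERT is not.
  RUNTIME_ASSERT(args[2]->IsUndefined());

  // Loose upper bound to allow fuzzing. We'll most likely run out of
  // stack space before hitting this limit.
  static int kMaxArgc = 1000000;
  RUNTIME_ASSERT(argc >= 0 && argc <= kMaxArgc);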
const int argv_small_size = 10; @@ -8978,167 +8999,152 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Apply) { } for (int i = 0; i < argc; ++i) { - argv[i] = Object::GetElement(isolate, arguments, offset + i); - RETURN_IF_EMPTY_HANDLE(isolate, argv[i]); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, argv[i], + Object::GetElement(isolate, arguments, offset + i)); } - bool threw; - Handle<Object> result = Execution::Call( - isolate, fun, receiver, argc, argv, &threw, true); - - if (threw) return Failure::Exception(); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Execution::Call(isolate, fun, receiver, argc, argv, true)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionDelegate) { +RUNTIME_FUNCTION(Runtime_GetFunctionDelegate) { HandleScope scope(isolate); ASSERT(args.length() == 1); - RUNTIME_ASSERT(!args[0]->IsJSFunction()); - return *Execution::GetFunctionDelegate(isolate, args.at<Object>(0)); + CONVERT_ARG_HANDLE_CHECKED(Object, object, 0); + RUNTIME_ASSERT(!object->IsJSFunction()); + return *Execution::GetFunctionDelegate(isolate, object); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetConstructorDelegate) { +RUNTIME_FUNCTION(Runtime_GetConstructorDelegate) { HandleScope scope(isolate); ASSERT(args.length() == 1); - RUNTIME_ASSERT(!args[0]->IsJSFunction()); - return *Execution::GetConstructorDelegate(isolate, args.at<Object>(0)); + CONVERT_ARG_HANDLE_CHECKED(Object, object, 0); + RUNTIME_ASSERT(!object->IsJSFunction()); + return *Execution::GetConstructorDelegate(isolate, object); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewGlobalContext) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_NewGlobalContext) { + HandleScope scope(isolate); ASSERT(args.length() == 2); - CONVERT_ARG_CHECKED(JSFunction, function, 0); - CONVERT_ARG_CHECKED(ScopeInfo, scope_info, 1); - Context* result; - MaybeObject* maybe_result = - isolate->heap()->AllocateGlobalContext(function, scope_info); - if (!maybe_result->To(&result)) return maybe_result; + CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); + CONVERT_ARG_HANDLE_CHECKED(ScopeInfo, scope_info, 1); + Handle<Context> result = + isolate->factory()->NewGlobalContext(function, scope_info); ASSERT(function->context() == isolate->context()); ASSERT(function->context()->global_object() == result->global_object()); - result->global_object()->set_global_context(result); - - return result; // non-failure + result->global_object()->set_global_context(*result); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_NewFunctionContext) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_NewFunctionContext) { + HandleScope scope(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(JSFunction, function, 0); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); int length = function->shared()->scope_info()->ContextLength(); - return isolate->heap()->AllocateFunctionContext(length, function); + return *isolate->factory()->NewFunctionContext(length, function); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_PushWithContext) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_PushWithContext) { + HandleScope scope(isolate); ASSERT(args.length() == 2); - JSReceiver* extension_object; + Handle<JSReceiver> extension_object; if (args[0]->IsJSReceiver()) { - extension_object = JSReceiver::cast(args[0]); + extension_object = args.at<JSReceiver>(0); } else { - // Convert the object to a proper JavaScript object. 
- MaybeObject* maybe_js_object = args[0]->ToObject(isolate); - if (!maybe_js_object->To(&extension_object)) { - if (Failure::cast(maybe_js_object)->IsInternalError()) { - HandleScope scope(isolate); - Handle<Object> handle = args.at<Object>(0); - Handle<Object> result = - isolate->factory()->NewTypeError("with_expression", - HandleVector(&handle, 1)); - return isolate->Throw(*result); - } else { - return maybe_js_object; - } + // Try to convert the object to a proper JavaScript object. + MaybeHandle<JSReceiver> maybe_object = + Object::ToObject(isolate, args.at<Object>(0)); + if (!maybe_object.ToHandle(&extension_object)) { + Handle<Object> handle = args.at<Object>(0); + Handle<Object> result = + isolate->factory()->NewTypeError("with_expression", + HandleVector(&handle, 1)); + return isolate->Throw(*result); } } - JSFunction* function; + Handle<JSFunction> function; if (args[1]->IsSmi()) { // A smi sentinel indicates a context nested inside global code rather // than some function. There is a canonical empty function that can be // gotten from the native context. - function = isolate->context()->native_context()->closure(); + function = handle(isolate->context()->native_context()->closure()); } else { - function = JSFunction::cast(args[1]); + function = args.at<JSFunction>(1); } - Context* context; - MaybeObject* maybe_context = - isolate->heap()->AllocateWithContext(function, - isolate->context(), - extension_object); - if (!maybe_context->To(&context)) return maybe_context; - isolate->set_context(context); - return context; + Handle<Context> current(isolate->context()); + Handle<Context> context = isolate->factory()->NewWithContext( + function, current, extension_object); + isolate->set_context(*context); + return *context; } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_PushCatchContext) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_PushCatchContext) { + HandleScope scope(isolate); ASSERT(args.length() == 3); - String* name = String::cast(args[0]); - Object* thrown_object = args[1]; - JSFunction* function; + CONVERT_ARG_HANDLE_CHECKED(String, name, 0); + CONVERT_ARG_HANDLE_CHECKED(Object, thrown_object, 1); + Handle<JSFunction> function; if (args[2]->IsSmi()) { // A smi sentinel indicates a context nested inside global code rather // than some function. There is a canonical empty function that can be // gotten from the native context. 
- function = isolate->context()->native_context()->closure(); + function = handle(isolate->context()->native_context()->closure()); } else { - function = JSFunction::cast(args[2]); + function = args.at<JSFunction>(2); } - Context* context; - MaybeObject* maybe_context = - isolate->heap()->AllocateCatchContext(function, - isolate->context(), - name, - thrown_object); - if (!maybe_context->To(&context)) return maybe_context; - isolate->set_context(context); - return context; + Handle<Context> current(isolate->context()); + Handle<Context> context = isolate->factory()->NewCatchContext( + function, current, name, thrown_object); + isolate->set_context(*context); + return *context; } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_PushBlockContext) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_PushBlockContext) { + HandleScope scope(isolate); ASSERT(args.length() == 2); - ScopeInfo* scope_info = ScopeInfo::cast(args[0]); - JSFunction* function; + CONVERT_ARG_HANDLE_CHECKED(ScopeInfo, scope_info, 0); + Handle<JSFunction> function; if (args[1]->IsSmi()) { // A smi sentinel indicates a context nested inside global code rather // than some function. There is a canonical empty function that can be // gotten from the native context. - function = isolate->context()->native_context()->closure(); + function = handle(isolate->context()->native_context()->closure()); } else { - function = JSFunction::cast(args[1]); + function = args.at<JSFunction>(1); } - Context* context; - MaybeObject* maybe_context = - isolate->heap()->AllocateBlockContext(function, - isolate->context(), - scope_info); - if (!maybe_context->To(&context)) return maybe_context; - isolate->set_context(context); - return context; + Handle<Context> current(isolate->context()); + Handle<Context> context = isolate->factory()->NewBlockContext( + function, current, scope_info); + isolate->set_context(*context); + return *context; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsJSModule) { +RUNTIME_FUNCTION(Runtime_IsJSModule) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); - Object* obj = args[0]; + CONVERT_ARG_CHECKED(Object, obj, 0); return isolate->heap()->ToBoolean(obj->IsJSModule()); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_PushModuleContext) { +RUNTIME_FUNCTION(RuntimeHidden_PushModuleContext) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); CONVERT_SMI_ARG_CHECKED(index, 0); @@ -9173,7 +9179,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_PushModuleContext) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareModules) { +RUNTIME_FUNCTION(RuntimeHidden_DeclareModules) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(FixedArray, descriptions, 0); @@ -9198,15 +9204,16 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareModules) { IsImmutableVariableMode(mode) ? 
FROZEN : SEALED; Handle<AccessorInfo> info = Accessors::MakeModuleExport(name, index, attr); - Handle<Object> result = JSObject::SetAccessor(module, info); - ASSERT(!(result.is_null() || result->IsUndefined())); + Handle<Object> result = + JSObject::SetAccessor(module, info).ToHandleChecked(); + ASSERT(!result->IsUndefined()); USE(result); break; } case MODULE: { Object* referenced_context = Context::cast(host_context)->get(index); Handle<JSModule> value(Context::cast(referenced_context)->module()); - JSReceiver::SetProperty(module, name, value, FROZEN, STRICT); + JSReceiver::SetProperty(module, name, value, FROZEN, STRICT).Assert(); break; } case INTERNAL: @@ -9218,7 +9225,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareModules) { } } - JSObject::PreventExtensions(module); + JSObject::PreventExtensions(module).Assert(); } ASSERT(!isolate->has_pending_exception()); @@ -9226,7 +9233,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeclareModules) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeleteContextSlot) { +RUNTIME_FUNCTION(RuntimeHidden_DeleteContextSlot) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -9257,8 +9264,10 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeleteContextSlot) { // the global object, or the subject of a with. Try to delete it // (respecting DONT_DELETE). Handle<JSObject> object = Handle<JSObject>::cast(holder); - Handle<Object> result = JSReceiver::DeleteProperty(object, name); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSReceiver::DeleteProperty(object, name)); return *result; } @@ -9273,12 +9282,12 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_DeleteContextSlot) { // allocated by the caller, and passed as a pointer in a hidden first parameter. #ifdef V8_HOST_ARCH_64_BIT struct ObjectPair { - MaybeObject* x; - MaybeObject* y; + Object* x; + Object* y; }; -static inline ObjectPair MakePair(MaybeObject* x, MaybeObject* y) { +static inline ObjectPair MakePair(Object* x, Object* y) { ObjectPair result = {x, y}; // Pointers x and y returned in rax and rdx, in AMD-x64-abi. // In Win64 they are assigned to a hidden first argument. @@ -9286,20 +9295,18 @@ static inline ObjectPair MakePair(MaybeObject* x, MaybeObject* y) { } #else typedef uint64_t ObjectPair; -static inline ObjectPair MakePair(MaybeObject* x, MaybeObject* y) { +static inline ObjectPair MakePair(Object* x, Object* y) { +#if defined(V8_TARGET_LITTLE_ENDIAN) return reinterpret_cast<uint32_t>(x) | (reinterpret_cast<ObjectPair>(y) << 32); -} +#elif defined(V8_TARGET_BIG_ENDIAN) + return reinterpret_cast<uint32_t>(y) | + (reinterpret_cast<ObjectPair>(x) << 32); +#else +#error Unknown endianness #endif - - -static inline MaybeObject* Unhole(Heap* heap, - MaybeObject* x, - PropertyAttributes attributes) { - ASSERT(!x->IsTheHole() || (attributes & READ_ONLY) != 0); - USE(attributes); - return x->IsTheHole() ? heap->undefined_value() : x; } +#endif static Object* ComputeReceiverForNonGlobal(Isolate* isolate, @@ -9344,7 +9351,7 @@ static ObjectPair LoadContextSlotHelper(Arguments args, &attributes, &binding_flags); if (isolate->has_pending_exception()) { - return MakePair(Failure::Exception(), NULL); + return MakePair(isolate->heap()->exception(), NULL); } // If the index is non-negative, the slot has been found in a context. 
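The MakePair change above is the one behavioral subtlety in this stretch: on 32-bit targets LoadContextSlot packs its two result pointers into a single uint64_t, and the packing now depends on V8_TARGET_LITTLE_ENDIAN versus V8_TARGET_BIG_ENDIAN so that the first value lands in the word the generated code actually reads. A standalone illustration of the same rule (PackPair is a hypothetical helper, not code from the tree):

  #include <cstdint>

  static inline uint64_t PackPair(uint32_t x, uint32_t y, bool little_endian) {
    // Little-endian: x occupies the low 32 bits; big-endian: the high 32 bits.
    return little_endian
        ? static_cast<uint64_t>(x) | (static_cast<uint64_t>(y) << 32)
        : static_cast<uint64_t>(y) | (static_cast<uint64_t>(x) << 32);
  }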
@@ -9371,7 +9378,11 @@ static ObjectPair LoadContextSlotHelper(Arguments args, ASSERT(!value->IsTheHole()); return MakePair(value, *receiver); case IMMUTABLE_CHECK_INITIALIZED: - return MakePair(Unhole(isolate->heap(), value, attributes), *receiver); + if (value->IsTheHole()) { + ASSERT((attributes & READ_ONLY) != 0); + value = isolate->heap()->undefined_value(); + } + return MakePair(value, *receiver); case MISSING_BINDING: UNREACHABLE(); return MakePair(NULL, NULL); @@ -9394,8 +9405,12 @@ static ObjectPair LoadContextSlotHelper(Arguments args, // No need to unhole the value here. This is taken care of by the // GetProperty function. - MaybeObject* value = object->GetProperty(*name); - return MakePair(value, *receiver_handle); + Handle<Object> value; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, value, + Object::GetProperty(object, name), + MakePair(isolate->heap()->exception(), NULL)); + return MakePair(*value, *receiver_handle); } if (throw_error) { @@ -9412,21 +9427,21 @@ static ObjectPair LoadContextSlotHelper(Arguments args, } -RUNTIME_FUNCTION(ObjectPair, RuntimeHidden_LoadContextSlot) { +RUNTIME_FUNCTION_RETURN_PAIR(RuntimeHidden_LoadContextSlot) { return LoadContextSlotHelper(args, isolate, true); } -RUNTIME_FUNCTION(ObjectPair, RuntimeHidden_LoadContextSlotNoReferenceError) { +RUNTIME_FUNCTION_RETURN_PAIR(RuntimeHidden_LoadContextSlotNoReferenceError) { return LoadContextSlotHelper(args, isolate, false); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_StoreContextSlot) { +RUNTIME_FUNCTION(RuntimeHidden_StoreContextSlot) { HandleScope scope(isolate); ASSERT(args.length() == 4); - Handle<Object> value(args[0], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, value, 0); CONVERT_ARG_HANDLE_CHECKED(Context, context, 1); CONVERT_ARG_HANDLE_CHECKED(String, name, 2); CONVERT_STRICT_MODE_ARG_CHECKED(strict_mode, 3); @@ -9440,7 +9455,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_StoreContextSlot) { &index, &attributes, &binding_flags); - if (isolate->has_pending_exception()) return Failure::Exception(); + if (isolate->has_pending_exception()) return isolate->heap()->exception(); if (index >= 0) { // The property was found in a context slot. @@ -9493,7 +9508,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_StoreContextSlot) { // Set the property if it's not read only or doesn't yet exist. 
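With MaybeObject on the way out, the tagged Failure::Exception() sentinel disappears as well: a runtime function that detects a pending exception now returns the heap's exception sentinel object, as the StoreContextSlot hunk above does:

  if (isolate->has_pending_exception()) return isolate->heap()->exception();

Callers and the exception-propagating macros check for that sentinel instead of a Failure-tagged pointer.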
if ((attributes & READ_ONLY) == 0 || (JSReceiver::GetLocalPropertyAttribute(object, name) == ABSENT)) { - RETURN_IF_EMPTY_HANDLE( + RETURN_FAILURE_ON_EXCEPTION( isolate, JSReceiver::SetProperty(object, name, value, NONE, strict_mode)); } else if (strict_mode == STRICT && (attributes & READ_ONLY) != 0) { @@ -9507,7 +9522,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_StoreContextSlot) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_Throw) { +RUNTIME_FUNCTION(RuntimeHidden_Throw) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -9515,7 +9530,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_Throw) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ReThrow) { +RUNTIME_FUNCTION(RuntimeHidden_ReThrow) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -9523,18 +9538,17 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ReThrow) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_PromoteScheduledException) { +RUNTIME_FUNCTION(RuntimeHidden_PromoteScheduledException) { SealHandleScope shs(isolate); - ASSERT_EQ(0, args.length()); + ASSERT(args.length() == 0); return isolate->PromoteScheduledException(); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ThrowReferenceError) { +RUNTIME_FUNCTION(RuntimeHidden_ThrowReferenceError) { HandleScope scope(isolate); ASSERT(args.length() == 1); - - Handle<Object> name(args[0], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, name, 0); Handle<Object> reference_error = isolate->factory()->NewReferenceError("not_defined", HandleVector(&name, 1)); @@ -9542,7 +9556,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ThrowReferenceError) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ThrowNotDateError) { +RUNTIME_FUNCTION(RuntimeHidden_ThrowNotDateError) { HandleScope scope(isolate); ASSERT(args.length() == 0); return isolate->Throw(*isolate->factory()->NewTypeError( @@ -9550,20 +9564,19 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ThrowNotDateError) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ThrowMessage) { +RUNTIME_FUNCTION(RuntimeHidden_ThrowMessage) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_SMI_ARG_CHECKED(message_id, 0); const char* message = GetBailoutReason( static_cast<BailoutReason>(message_id)); Handle<String> message_handle = - isolate->factory()->NewStringFromAscii(CStrVector(message)); - RETURN_IF_EMPTY_HANDLE(isolate, message_handle); + isolate->factory()->NewStringFromAsciiChecked(message); return isolate->Throw(*message_handle); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_StackGuard) { +RUNTIME_FUNCTION(RuntimeHidden_StackGuard) { SealHandleScope shs(isolate); ASSERT(args.length() == 0); @@ -9576,7 +9589,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_StackGuard) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_TryInstallOptimizedCode) { +RUNTIME_FUNCTION(RuntimeHidden_TryInstallOptimizedCode) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); @@ -9593,7 +9606,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_TryInstallOptimizedCode) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_Interrupt) { +RUNTIME_FUNCTION(RuntimeHidden_Interrupt) { SealHandleScope shs(isolate); ASSERT(args.length() == 0); return Execution::HandleStackGuardInterrupt(isolate); @@ -9629,7 +9642,7 @@ static void PrintTransition(Isolate* isolate, Object* result) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_TraceEnter) { +RUNTIME_FUNCTION(Runtime_TraceEnter) { SealHandleScope shs(isolate); ASSERT(args.length() == 0); PrintTransition(isolate, NULL); @@ -9637,14 
+9650,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TraceEnter) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_TraceExit) { +RUNTIME_FUNCTION(Runtime_TraceExit) { SealHandleScope shs(isolate); - PrintTransition(isolate, args[0]); - return args[0]; // return TOS + ASSERT(args.length() == 1); + CONVERT_ARG_CHECKED(Object, obj, 0); + PrintTransition(isolate, obj); + return obj; // return TOS } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPrint) { +RUNTIME_FUNCTION(Runtime_DebugPrint) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); @@ -9675,7 +9690,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPrint) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugTrace) { +RUNTIME_FUNCTION(Runtime_DebugTrace) { SealHandleScope shs(isolate); ASSERT(args.length() == 0); isolate->PrintStack(stdout); @@ -9683,8 +9698,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugTrace) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DateCurrentTime) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_DateCurrentTime) { + HandleScope scope(isolate); ASSERT(args.length() == 0); // According to ECMA-262, section 15.9.1, page 117, the precision of @@ -9692,36 +9707,35 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateCurrentTime) { // time is milliseconds. Therefore, we floor the result of getting // the OS time. double millis = std::floor(OS::TimeCurrentMillis()); - return isolate->heap()->NumberFromDouble(millis); + return *isolate->factory()->NewNumber(millis); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DateParseString) { +RUNTIME_FUNCTION(Runtime_DateParseString) { HandleScope scope(isolate); ASSERT(args.length() == 2); - CONVERT_ARG_HANDLE_CHECKED(String, str, 0); - FlattenString(str); - CONVERT_ARG_HANDLE_CHECKED(JSArray, output, 1); + RUNTIME_ASSERT(output->HasFastElements()); JSObject::EnsureCanContainHeapObjectElements(output); RUNTIME_ASSERT(output->HasFastObjectElements()); + Handle<FixedArray> output_array(FixedArray::cast(output->elements())); + RUNTIME_ASSERT(output_array->length() >= DateParser::OUTPUT_SIZE); + str = String::Flatten(str); DisallowHeapAllocation no_gc; - FixedArray* output_array = FixedArray::cast(output->elements()); - RUNTIME_ASSERT(output_array->length() >= DateParser::OUTPUT_SIZE); bool result; String::FlatContent str_content = str->GetFlatContent(); if (str_content.IsAscii()) { result = DateParser::Parse(str_content.ToOneByteVector(), - output_array, + *output_array, isolate->unicode_cache()); } else { ASSERT(str_content.IsTwoByte()); result = DateParser::Parse(str_content.ToUC16Vector(), - output_array, + *output_array, isolate->unicode_cache()); } @@ -9733,29 +9747,31 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateParseString) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DateLocalTimezone) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_DateLocalTimezone) { + HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_DOUBLE_ARG_CHECKED(x, 0); const char* zone = isolate->date_cache()->LocalTimezone(static_cast<int64_t>(x)); - return isolate->heap()->AllocateStringFromUtf8(CStrVector(zone)); + Handle<String> result = isolate->factory()->NewStringFromUtf8( + CStrVector(zone)).ToHandleChecked(); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DateToUTC) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_DateToUTC) { + HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_DOUBLE_ARG_CHECKED(x, 0); int64_t time = isolate->date_cache()->ToUTC(static_cast<int64_t>(x)); - return isolate->heap()->NumberFromDouble(static_cast<double>(time)); + 
return *isolate->factory()->NewNumber(static_cast<double>(time)); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DateCacheVersion) { +RUNTIME_FUNCTION(Runtime_DateCacheVersion) { HandleScope hs(isolate); ASSERT(args.length() == 0); if (!isolate->eternal_handles()->Exists(EternalHandles::DATE_CACHE_VERSION)) { @@ -9776,43 +9792,37 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateCacheVersion) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GlobalReceiver) { +RUNTIME_FUNCTION(Runtime_GlobalReceiver) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); - Object* global = args[0]; + CONVERT_ARG_CHECKED(Object, global, 0); if (!global->IsJSGlobalObject()) return isolate->heap()->null_value(); return JSGlobalObject::cast(global)->global_receiver(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsAttachedGlobal) { +RUNTIME_FUNCTION(Runtime_IsAttachedGlobal) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); - Object* global = args[0]; + CONVERT_ARG_CHECKED(Object, global, 0); if (!global->IsJSGlobalObject()) return isolate->heap()->false_value(); return isolate->heap()->ToBoolean( !JSGlobalObject::cast(global)->IsDetached()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ParseJson) { +RUNTIME_FUNCTION(Runtime_ParseJson) { HandleScope scope(isolate); - ASSERT_EQ(1, args.length()); + ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(String, source, 0); - source = Handle<String>(FlattenGetString(source)); + source = String::Flatten(source); // Optimized fast case where we only have ASCII characters. Handle<Object> result; - if (source->IsSeqOneByteString()) { - result = JsonParser<true>::Parse(source); - } else { - result = JsonParser<false>::Parse(source); - } - if (result.is_null()) { - // Syntax error or stack overflow in scanner. - ASSERT(isolate->has_pending_exception()); - return Failure::Exception(); - } + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + source->IsSeqOneByteString() ? JsonParser<true>::Parse(source) + : JsonParser<false>::Parse(source)); return *result; } @@ -9834,9 +9844,9 @@ bool CodeGenerationFromStringsAllowed(Isolate* isolate, } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileString) { +RUNTIME_FUNCTION(Runtime_CompileString) { HandleScope scope(isolate); - ASSERT_EQ(2, args.length()); + ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(String, source, 0); CONVERT_BOOLEAN_ARG_CHECKED(function_literal_only, 1); @@ -9856,9 +9866,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileString) { // Compile source string in the native context. ParseRestriction restriction = function_literal_only ? ONLY_SINGLE_FUNCTION_LITERAL : NO_PARSE_RESTRICTION; - Handle<JSFunction> fun = Compiler::GetFunctionFromEval( - source, context, SLOPPY, restriction, RelocInfo::kNoPosition); - RETURN_IF_EMPTY_HANDLE(isolate, fun); + Handle<JSFunction> fun; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, fun, + Compiler::GetFunctionFromEval( + source, context, SLOPPY, restriction, RelocInfo::kNoPosition)); return *fun; } @@ -9879,21 +9891,23 @@ static ObjectPair CompileGlobalEval(Isolate* isolate, native_context->ErrorMessageForCodeGenerationFromStrings(); isolate->Throw(*isolate->factory()->NewEvalError( "code_gen_from_strings", HandleVector<Object>(&error_message, 1))); - return MakePair(Failure::Exception(), NULL); + return MakePair(isolate->heap()->exception(), NULL); } // Deal with a normal eval call with a string argument. Compile it // and return the compiled function bound in the local context. 
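Runtime_ParseJson and Runtime_CompileString above apply the same conversion to APIs that used to signal errors with an empty handle: JsonParser::Parse and Compiler::GetFunctionFromEval now feed ASSIGN_RETURN_FAILURE_ON_EXCEPTION directly, and FlattenGetString gives way to the handle-based String::Flatten. Condensed from the ParseJson hunk:

  source = String::Flatten(source);
  Handle<Object> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, result,
      source->IsSeqOneByteString() ? JsonParser<true>::Parse(source)
                                   : JsonParser<false>::Parse(source));
  return *result;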
static const ParseRestriction restriction = NO_PARSE_RESTRICTION; - Handle<JSFunction> compiled = Compiler::GetFunctionFromEval( - source, context, strict_mode, restriction, scope_position); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, compiled, - MakePair(Failure::Exception(), NULL)); + Handle<JSFunction> compiled; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, compiled, + Compiler::GetFunctionFromEval( + source, context, strict_mode, restriction, scope_position), + MakePair(isolate->heap()->exception(), NULL)); return MakePair(*compiled, *receiver); } -RUNTIME_FUNCTION(ObjectPair, RuntimeHidden_ResolvePossiblyDirectEval) { +RUNTIME_FUNCTION_RETURN_PAIR(RuntimeHidden_ResolvePossiblyDirectEval) { HandleScope scope(isolate); ASSERT(args.length() == 5); @@ -9921,52 +9935,35 @@ RUNTIME_FUNCTION(ObjectPair, RuntimeHidden_ResolvePossiblyDirectEval) { } -// Allocate a block of memory in the given space (filled with a filler). -// Used as a fall-back for generated code when the space is full. -static MaybeObject* Allocate(Isolate* isolate, - int size, - bool double_align, - AllocationSpace space) { - Heap* heap = isolate->heap(); +RUNTIME_FUNCTION(RuntimeHidden_AllocateInNewSpace) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_SMI_ARG_CHECKED(size, 0); RUNTIME_ASSERT(IsAligned(size, kPointerSize)); RUNTIME_ASSERT(size > 0); RUNTIME_ASSERT(size <= Page::kMaxRegularHeapObjectSize); - HeapObject* allocation; - { MaybeObject* maybe_allocation = heap->AllocateRaw(size, space, space); - if (!maybe_allocation->To(&allocation)) return maybe_allocation; - } -#ifdef DEBUG - MemoryChunk* chunk = MemoryChunk::FromAddress(allocation->address()); - ASSERT(chunk->owner()->identity() == space); -#endif - heap->CreateFillerObjectAt(allocation->address(), size); - return allocation; + return *isolate->factory()->NewFillerObject(size, false, NEW_SPACE); } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_AllocateInNewSpace) { - SealHandleScope shs(isolate); - ASSERT(args.length() == 1); - CONVERT_SMI_ARG_CHECKED(size, 0); - return Allocate(isolate, size, false, NEW_SPACE); -} - - -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_AllocateInTargetSpace) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(RuntimeHidden_AllocateInTargetSpace) { + HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_SMI_ARG_CHECKED(size, 0); CONVERT_SMI_ARG_CHECKED(flags, 1); + RUNTIME_ASSERT(IsAligned(size, kPointerSize)); + RUNTIME_ASSERT(size > 0); + RUNTIME_ASSERT(size <= Page::kMaxRegularHeapObjectSize); bool double_align = AllocateDoubleAlignFlag::decode(flags); AllocationSpace space = AllocateTargetSpace::decode(flags); - return Allocate(isolate, size, double_align, space); + return *isolate->factory()->NewFillerObject(size, double_align, space); } // Push an object unto an array of objects if it is not already in the // array. Returns true if the element was pushed on the stack and // false otherwise. -RUNTIME_FUNCTION(MaybeObject*, Runtime_PushIfAbsent) { +RUNTIME_FUNCTION(Runtime_PushIfAbsent) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSArray, array, 0); @@ -9979,10 +9976,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PushIfAbsent) { } // Strict not needed. Used for cycle detection in Array join implementation. 
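Helpers that do not return a single Object* cannot use the plain failure macro, so the patch reaches for ASSIGN_RETURN_ON_EXCEPTION_VALUE, which takes an explicit value to return on failure. CompileGlobalEval above returns a pair-shaped failure:

  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(source, context, strict_mode,
                                    restriction, scope_position),
      MakePair(isolate->heap()->exception(), NULL));  // ObjectPair failure
  return MakePair(*compiled, *receiver);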
- RETURN_IF_EMPTY_HANDLE(isolate, JSObject::SetFastElement(array, length, - element, - SLOPPY, - true)); + RETURN_FAILURE_ON_EXCEPTION( + isolate, + JSObject::SetFastElement(array, length, element, SLOPPY, true)); return isolate->heap()->true_value(); } @@ -10037,7 +10033,7 @@ class ArrayConcatVisitor { Handle<SeededNumberDictionary> dict( SeededNumberDictionary::cast(*storage_)); Handle<SeededNumberDictionary> result = - isolate_->factory()->DictionaryAtNumberPut(dict, index, elm); + SeededNumberDictionary::AtNumberPut(dict, index, elm); if (!result.is_identical_to(dict)) { // Dictionary needed to grow. clear_storage(); @@ -10061,12 +10057,9 @@ class ArrayConcatVisitor { Handle<JSArray> array = isolate_->factory()->NewJSArray(0); Handle<Object> length = isolate_->factory()->NewNumber(static_cast<double>(index_offset_)); - Handle<Map> map; - if (fast_elements_) { - map = JSObject::GetElementsTransitionMap(array, FAST_HOLEY_ELEMENTS); - } else { - map = JSObject::GetElementsTransitionMap(array, DICTIONARY_ELEMENTS); - } + Handle<Map> map = JSObject::GetElementsTransitionMap( + array, + fast_elements_ ? FAST_HOLEY_ELEMENTS : DICTIONARY_ELEMENTS); array->set_map(*map); array->set_length(*length); array->set_elements(*storage_); @@ -10079,15 +10072,14 @@ class ArrayConcatVisitor { ASSERT(fast_elements_); Handle<FixedArray> current_storage(*storage_); Handle<SeededNumberDictionary> slow_storage( - isolate_->factory()->NewSeededNumberDictionary( - current_storage->length())); + SeededNumberDictionary::New(isolate_, current_storage->length())); uint32_t current_length = static_cast<uint32_t>(current_storage->length()); for (uint32_t i = 0; i < current_length; i++) { HandleScope loop_scope(isolate_); Handle<Object> element(current_storage->get(i), isolate_); if (!element->IsTheHole()) { Handle<SeededNumberDictionary> new_storage = - isolate_->factory()->DictionaryAtNumberPut(slow_storage, i, element); + SeededNumberDictionary::AtNumberPut(slow_storage, i, element); if (!new_storage.is_identical_to(slow_storage)) { slow_storage = loop_scope.CloseAndEscape(new_storage); } @@ -10349,8 +10341,10 @@ static bool IterateElements(Isolate* isolate, } else if (JSReceiver::HasElement(receiver, j)) { // Call GetElement on receiver, not its prototype, or getters won't // have the correct receiver. - element_value = Object::GetElement(isolate, receiver, j); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, element_value, false); + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, element_value, + Object::GetElement(isolate, receiver, j), + false); visitor->visit(j, element_value); } } @@ -10358,6 +10352,8 @@ static bool IterateElements(Isolate* isolate, } case FAST_HOLEY_DOUBLE_ELEMENTS: case FAST_DOUBLE_ELEMENTS: { + // Empty array is FixedArray but not FixedDoubleArray. + if (length == 0) break; // Run through the elements FixedArray and use HasElement and GetElement // to check the prototype for missing elements. Handle<FixedDoubleArray> elements( @@ -10374,9 +10370,11 @@ static bool IterateElements(Isolate* isolate, } else if (JSReceiver::HasElement(receiver, j)) { // Call GetElement on receiver, not its prototype, or getters won't // have the correct receiver. 
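Two details recur in the IterateElements hunks in this region: the FixedDoubleArray cast is now guarded by a length check, because a zero-length array is backed by the shared empty FixedArray rather than a FixedDoubleArray, and the bool-returning helper passes false as its failure value. In outline:

  // Empty array: elements() is the shared empty FixedArray, so do not cast.
  if (length == 0) break;

  Handle<Object> element_value;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, element_value,
      Object::GetElement(isolate, receiver, j),
      false);  // IterateElements() itself reports failure by returning false
  visitor->visit(j, element_value);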
- Handle<Object> element_value = - Object::GetElement(isolate, receiver, j); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, element_value, false); + Handle<Object> element_value; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, element_value, + Object::GetElement(isolate, receiver, j), + false); visitor->visit(j, element_value); } } @@ -10394,8 +10392,11 @@ static bool IterateElements(Isolate* isolate, while (j < n) { HandleScope loop_scope(isolate); uint32_t index = indices[j]; - Handle<Object> element = Object::GetElement(isolate, receiver, index); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, element, false); + Handle<Object> element; + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, element, + Object::GetElement(isolate, receiver, index), + false); visitor->visit(index, element); // Skip to next different index (i.e., omit duplicates). do { @@ -10468,7 +10469,7 @@ static bool IterateElements(Isolate* isolate, * TODO(581): Fix non-compliance for very large concatenations and update to * following the ECMAScript 5 specification. */ -RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) { +RUNTIME_FUNCTION(Runtime_ArrayConcat) { HandleScope handle_scope(isolate); ASSERT(args.length() == 1); @@ -10535,12 +10536,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) { // dictionary. bool fast_case = (estimate_nof_elements * 2) >= estimate_result_length; - Handle<FixedArray> storage; - if (fast_case) { - if (kind == FAST_DOUBLE_ELEMENTS) { + if (fast_case && kind == FAST_DOUBLE_ELEMENTS) { + Handle<FixedArrayBase> storage = + isolate->factory()->NewFixedDoubleArray(estimate_result_length); + int j = 0; + if (estimate_result_length > 0) { Handle<FixedDoubleArray> double_storage = - isolate->factory()->NewFixedDoubleArray(estimate_result_length); - int j = 0; + Handle<FixedDoubleArray>::cast(storage); bool failure = false; for (int i = 0; i < argument_count; i++) { Handle<Object> obj(elements->get(i), isolate); @@ -10556,8 +10558,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) { switch (array->map()->elements_kind()) { case FAST_HOLEY_DOUBLE_ELEMENTS: case FAST_DOUBLE_ELEMENTS: { - // Empty fixed array indicates that there are no elements. - if (array->elements()->IsFixedArray()) break; + // Empty array is FixedArray but not FixedDoubleArray. + if (length == 0) break; FixedDoubleArray* elements = FixedDoubleArray::cast(array->elements()); for (uint32_t i = 0; i < length; i++) { @@ -10596,15 +10598,19 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) { } if (failure) break; } - Handle<JSArray> array = isolate->factory()->NewJSArray(0); - Smi* length = Smi::FromInt(j); - Handle<Map> map; - map = JSObject::GetElementsTransitionMap(array, kind); - array->set_map(*map); - array->set_length(length); - array->set_elements(*double_storage); - return *array; } + Handle<JSArray> array = isolate->factory()->NewJSArray(0); + Smi* length = Smi::FromInt(j); + Handle<Map> map; + map = JSObject::GetElementsTransitionMap(array, kind); + array->set_map(*map); + array->set_length(length); + array->set_elements(*storage); + return *array; + } + + Handle<FixedArray> storage; + if (fast_case) { // The backing storage array must have non-existing elements to preserve // holes across concat operations. 
storage = isolate->factory()->NewFixedArrayWithHoles( @@ -10614,7 +10620,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) { uint32_t at_least_space_for = estimate_nof_elements + (estimate_nof_elements >> 2); storage = Handle<FixedArray>::cast( - isolate->factory()->NewSeededNumberDictionary(at_least_space_for)); + SeededNumberDictionary::New(isolate, at_least_space_for)); } ArrayConcatVisitor visitor(isolate, storage, fast_case); @@ -10624,7 +10630,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) { if (obj->IsJSArray()) { Handle<JSArray> array = Handle<JSArray>::cast(obj); if (!IterateElements(isolate, array, &visitor)) { - return Failure::Exception(); + return isolate->heap()->exception(); } } else { visitor.visit(0, obj); @@ -10643,7 +10649,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) { // This will not allocate (flatten the string), but it may run // very slowly for very deeply nested ConsStrings. For debugging use only. -RUNTIME_FUNCTION(MaybeObject*, Runtime_GlobalPrint) { +RUNTIME_FUNCTION(Runtime_GlobalPrint) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); @@ -10664,7 +10670,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GlobalPrint) { // property. // Returns the number of non-undefined elements collected. // Returns -1 if hole removal is not supported by this method. -RUNTIME_FUNCTION(MaybeObject*, Runtime_RemoveArrayHoles) { +RUNTIME_FUNCTION(Runtime_RemoveArrayHoles) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); @@ -10674,33 +10680,30 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RemoveArrayHoles) { // Move contents of argument 0 (an array) to argument 1 (an array) -RUNTIME_FUNCTION(MaybeObject*, Runtime_MoveArrayContents) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_MoveArrayContents) { + HandleScope scope(isolate); ASSERT(args.length() == 2); - CONVERT_ARG_CHECKED(JSArray, from, 0); - CONVERT_ARG_CHECKED(JSArray, to, 1); - from->ValidateElements(); - to->ValidateElements(); - FixedArrayBase* new_elements = from->elements(); + CONVERT_ARG_HANDLE_CHECKED(JSArray, from, 0); + CONVERT_ARG_HANDLE_CHECKED(JSArray, to, 1); + JSObject::ValidateElements(from); + JSObject::ValidateElements(to); + + Handle<FixedArrayBase> new_elements(from->elements()); ElementsKind from_kind = from->GetElementsKind(); - MaybeObject* maybe_new_map; - maybe_new_map = to->GetElementsTransitionMap(isolate, from_kind); - Object* new_map; - if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map; - to->set_map_and_elements(Map::cast(new_map), new_elements); + Handle<Map> new_map = JSObject::GetElementsTransitionMap(to, from_kind); + JSObject::SetMapAndElements(to, new_map, new_elements); to->set_length(from->length()); - Object* obj; - { MaybeObject* maybe_obj = from->ResetElements(); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } + + JSObject::ResetElements(from); from->set_length(Smi::FromInt(0)); - to->ValidateElements(); - return to; + + JSObject::ValidateElements(to); + return *to; } // How many elements does this object/array have? -RUNTIME_FUNCTION(MaybeObject*, Runtime_EstimateNumberOfElements) { +RUNTIME_FUNCTION(Runtime_EstimateNumberOfElements) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSObject, object, 0); @@ -10721,7 +10724,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_EstimateNumberOfElements) { // or undefined) or a number representing the positive length of an interval // starting at index 0. 
// Intervals can span over some keys that are not in the object. -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArrayKeys) { +RUNTIME_FUNCTION(Runtime_GetArrayKeys) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSObject, array, 0); @@ -10741,7 +10744,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArrayKeys) { isolate->factory()->NewFixedArray( current->NumberOfLocalElements(NONE)); current->GetLocalElementKeys(*current_keys, NONE); - keys = UnionOfKeys(keys, current_keys); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, keys, FixedArray::UnionOfKeys(keys, current_keys)); } // Erase any keys >= length. // TODO(adamk): Remove this step when the contract of %GetArrayKeys @@ -10759,7 +10763,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArrayKeys) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_LookupAccessor) { +RUNTIME_FUNCTION(Runtime_LookupAccessor) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(JSReceiver, receiver, 0); @@ -10767,15 +10771,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LookupAccessor) { CONVERT_SMI_ARG_CHECKED(flag, 2); AccessorComponent component = flag == 0 ? ACCESSOR_GETTER : ACCESSOR_SETTER; if (!receiver->IsJSObject()) return isolate->heap()->undefined_value(); - Handle<Object> result = - JSObject::GetAccessor(Handle<JSObject>::cast(receiver), name, component); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSObject::GetAccessor(Handle<JSObject>::cast(receiver), name, component)); return *result; } -#ifdef ENABLE_DEBUGGER_SUPPORT -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugBreak) { +RUNTIME_FUNCTION(Runtime_DebugBreak) { SealHandleScope shs(isolate); ASSERT(args.length() == 0); return Execution::DebugBreakHelper(isolate); @@ -10798,21 +10802,21 @@ static StackFrame::Id UnwrapFrameId(int wrapped) { // args[0]: debug event listener function to set or null or undefined for // clearing the event listener function // args[1]: object supplied during callback -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetDebugEventListener) { +RUNTIME_FUNCTION(Runtime_SetDebugEventListener) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); RUNTIME_ASSERT(args[0]->IsJSFunction() || args[0]->IsUndefined() || args[0]->IsNull()); - Handle<Object> callback = args.at<Object>(0); - Handle<Object> data = args.at<Object>(1); + CONVERT_ARG_HANDLE_CHECKED(Object, callback, 0); + CONVERT_ARG_HANDLE_CHECKED(Object, data, 1); isolate->debugger()->SetEventListener(callback, data); return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_Break) { +RUNTIME_FUNCTION(Runtime_Break) { SealHandleScope shs(isolate); ASSERT(args.length() == 0); isolate->stack_guard()->DebugBreak(); @@ -10820,64 +10824,50 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Break) { } -static MaybeObject* DebugLookupResultValue(Heap* heap, - Object* receiver, - Name* name, - LookupResult* result, - bool* caught_exception) { - Object* value; +static Handle<Object> DebugLookupResultValue(Isolate* isolate, + Handle<Object> receiver, + Handle<Name> name, + LookupResult* result, + bool* has_caught = NULL) { + Handle<Object> value = isolate->factory()->undefined_value(); + if (!result->IsFound()) return value; switch (result->type()) { case NORMAL: - value = result->holder()->GetNormalizedProperty(result); - if (value->IsTheHole()) { - return heap->undefined_value(); - } - return value; - case FIELD: { - Object* value; - MaybeObject* maybe_value = - 
JSObject::cast(result->holder())->FastPropertyAt( - result->representation(), - result->GetFieldIndex().field_index()); - if (!maybe_value->To(&value)) return maybe_value; - if (value->IsTheHole()) { - return heap->undefined_value(); - } - return value; - } + value = JSObject::GetNormalizedProperty( + handle(result->holder(), isolate), result); + break; + case FIELD: + value = JSObject::FastPropertyAt(handle(result->holder(), isolate), + result->representation(), + result->GetFieldIndex().field_index()); + break; case CONSTANT: - return result->GetConstant(); + return handle(result->GetConstant(), isolate); case CALLBACKS: { - Object* structure = result->GetCallbackObject(); - if (structure->IsForeign() || structure->IsAccessorInfo()) { - Isolate* isolate = heap->isolate(); - HandleScope scope(isolate); - Handle<Object> value = JSObject::GetPropertyWithCallback( - handle(result->holder(), isolate), - handle(receiver, isolate), - handle(structure, isolate), - handle(name, isolate)); - if (value.is_null()) { - MaybeObject* exception = heap->isolate()->pending_exception(); - heap->isolate()->clear_pending_exception(); - if (caught_exception != NULL) *caught_exception = true; - return exception; + Handle<Object> structure(result->GetCallbackObject(), isolate); + ASSERT(!structure->IsForeign()); + if (structure->IsAccessorInfo()) { + MaybeHandle<Object> obj = JSObject::GetPropertyWithCallback( + handle(result->holder(), isolate), receiver, structure, name); + if (!obj.ToHandle(&value)) { + value = handle(isolate->pending_exception(), isolate); + isolate->clear_pending_exception(); + if (has_caught != NULL) *has_caught = true; + return value; } - return *value; - } else { - return heap->undefined_value(); } + break; } case INTERCEPTOR: - case TRANSITION: - return heap->undefined_value(); + break; case HANDLER: case NONEXISTENT: UNREACHABLE(); - return heap->undefined_value(); + break; } - UNREACHABLE(); // keep the compiler happy - return heap->undefined_value(); + ASSERT(!value->IsTheHole() || result->IsReadOnly()); + return value->IsTheHole() + ? Handle<Object>::cast(isolate->factory()->undefined_value()) : value; } @@ -10893,7 +10883,7 @@ static MaybeObject* DebugLookupResultValue(Heap* heap, // 4: Setter function if defined // Items 2-4 are only filled if the property has either a getter or a setter // defined through __defineGetter__ and/or __defineSetter__. -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) { +RUNTIME_FUNCTION(Runtime_DebugGetPropertyDetails) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -10924,9 +10914,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) { uint32_t index; if (name->AsArrayIndex(&index)) { Handle<FixedArray> details = isolate->factory()->NewFixedArray(2); - Handle<Object> element_or_char = - Runtime::GetElementOrCharAt(isolate, obj, index); - RETURN_IF_EMPTY_HANDLE(isolate, element_or_char); + Handle<Object> element_or_char; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, element_or_char, + Runtime::GetElementOrCharAt(isolate, obj, index)); details->set(0, *element_or_char); details->set( 1, PropertyDetails(NONE, NORMAL, Representation::None()).AsSmi()); @@ -10940,7 +10931,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) { Handle<JSObject> jsproto = obj; for (int i = 0; i < length; i++) { LookupResult result(isolate); - jsproto->LocalLookup(*name, &result); + jsproto->LocalLookup(name, &result); if (result.IsFound()) { // LookupResult is not GC safe as it holds raw object pointers. 
// GC can happen later in this code so put the required fields into @@ -10950,29 +10941,23 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) { result_callback_obj = Handle<Object>(result.GetCallbackObject(), isolate); } - Smi* property_details = result.GetPropertyDetails().AsSmi(); - // DebugLookupResultValue can cause GC so details from LookupResult needs - // to be copied to handles before this. - bool caught_exception = false; - Object* raw_value; - { MaybeObject* maybe_raw_value = - DebugLookupResultValue(isolate->heap(), *obj, *name, - &result, &caught_exception); - if (!maybe_raw_value->ToObject(&raw_value)) return maybe_raw_value; - } - Handle<Object> value(raw_value, isolate); + + + bool has_caught = false; + Handle<Object> value = DebugLookupResultValue( + isolate, obj, name, &result, &has_caught); // If the callback object is a fixed array then it contains JavaScript // getter and/or setter. - bool hasJavaScriptAccessors = result.IsPropertyCallbacks() && - result_callback_obj->IsAccessorPair(); + bool has_js_accessors = result.IsPropertyCallbacks() && + result_callback_obj->IsAccessorPair(); Handle<FixedArray> details = - isolate->factory()->NewFixedArray(hasJavaScriptAccessors ? 5 : 2); + isolate->factory()->NewFixedArray(has_js_accessors ? 5 : 2); details->set(0, *value); - details->set(1, property_details); - if (hasJavaScriptAccessors) { + details->set(1, result.GetPropertyDetails().AsSmi()); + if (has_js_accessors) { AccessorPair* accessors = AccessorPair::cast(*result_callback_obj); - details->set(2, isolate->heap()->ToBoolean(caught_exception)); + details->set(2, isolate->heap()->ToBoolean(has_caught)); details->set(3, accessors->GetComponent(ACCESSOR_GETTER)); details->set(4, accessors->GetComponent(ACCESSOR_SETTER)); } @@ -10988,7 +10973,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetProperty) { +RUNTIME_FUNCTION(Runtime_DebugGetProperty) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -10997,17 +10982,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetProperty) { CONVERT_ARG_HANDLE_CHECKED(Name, name, 1); LookupResult result(isolate); - obj->Lookup(*name, &result); - if (result.IsFound()) { - return DebugLookupResultValue(isolate->heap(), *obj, *name, &result, NULL); - } - return isolate->heap()->undefined_value(); + obj->Lookup(name, &result); + return *DebugLookupResultValue(isolate, obj, name, &result); } // Return the property type calculated from the property details. // args[0]: smi with property details. -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyTypeFromDetails) { +RUNTIME_FUNCTION(Runtime_DebugPropertyTypeFromDetails) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_PROPERTY_DETAILS_CHECKED(details, 0); @@ -11017,7 +10999,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyTypeFromDetails) { // Return the property attribute calculated from the property details. // args[0]: smi with property details. -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyAttributesFromDetails) { +RUNTIME_FUNCTION(Runtime_DebugPropertyAttributesFromDetails) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_PROPERTY_DETAILS_CHECKED(details, 0); @@ -11027,7 +11009,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyAttributesFromDetails) { // Return the property insertion index calculated from the property details. // args[0]: smi with property details. 
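DebugLookupResultValue, rewritten above, now returns a Handle<Object> directly and reports a throwing accessor through the optional has_caught flag (the caught exception becomes the returned value), instead of smuggling it out as a MaybeObject*. Its caller in Runtime_DebugGetPropertyDetails consumes it as:

  bool has_caught = false;
  Handle<Object> value =
      DebugLookupResultValue(isolate, obj, name, &result, &has_caught);
  // 'value' is the property value, undefined for holes, or the caught
  // exception when has_caught is true.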
-RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyIndexFromDetails) { +RUNTIME_FUNCTION(Runtime_DebugPropertyIndexFromDetails) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_PROPERTY_DETAILS_CHECKED(details, 0); @@ -11039,7 +11021,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyIndexFromDetails) { // Return property value from named interceptor. // args[0]: object // args[1]: property name -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugNamedInterceptorPropertyValue) { +RUNTIME_FUNCTION(Runtime_DebugNamedInterceptorPropertyValue) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0); @@ -11047,9 +11029,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugNamedInterceptorPropertyValue) { CONVERT_ARG_HANDLE_CHECKED(Name, name, 1); PropertyAttributes attributes; - Handle<Object> result = - JSObject::GetPropertyWithInterceptor(obj, obj, name, &attributes); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSObject::GetPropertyWithInterceptor(obj, obj, name, &attributes)); return *result; } @@ -11057,43 +11040,39 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugNamedInterceptorPropertyValue) { // Return element value from indexed interceptor. // args[0]: object // args[1]: index -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugIndexedInterceptorElementValue) { +RUNTIME_FUNCTION(Runtime_DebugIndexedInterceptorElementValue) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0); RUNTIME_ASSERT(obj->HasIndexedInterceptor()); CONVERT_NUMBER_CHECKED(uint32_t, index, Uint32, args[1]); - Handle<Object> result = JSObject::GetElementWithInterceptor(obj, obj, index); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, JSObject::GetElementWithInterceptor(obj, obj, index)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CheckExecutionState) { +static bool CheckExecutionState(Isolate* isolate, int break_id) { + return (isolate->debug()->break_id() != 0 && + break_id == isolate->debug()->break_id()); +} + + +RUNTIME_FUNCTION(Runtime_CheckExecutionState) { SealHandleScope shs(isolate); - ASSERT(args.length() >= 1); + ASSERT(args.length() == 1); CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); - // Check that the break id is valid. - if (isolate->debug()->break_id() == 0 || - break_id != isolate->debug()->break_id()) { - return isolate->Throw( - isolate->heap()->illegal_execution_state_string()); - } - + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); return isolate->heap()->true_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameCount) { +RUNTIME_FUNCTION(Runtime_GetFrameCount) { HandleScope scope(isolate); ASSERT(args.length() == 1); - - // Check arguments. - Object* result; - { MaybeObject* maybe_result = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_result->ToObject(&result)) return maybe_result; - } + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); // Count all frames which are relevant to debugging stack trace. 
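The debugger entry points above stop re-invoking Runtime_CheckExecutionState through RUNTIME_ARGUMENTS; a file-local helper validates the break id and each function asserts on it directly, so an invalid break id now trips a RUNTIME_ASSERT instead of throwing illegal_execution_state:

  static bool CheckExecutionState(Isolate* isolate, int break_id) {
    return isolate->debug()->break_id() != 0 &&
           break_id == isolate->debug()->break_id();
  }

  // At the top of Runtime_GetFrameCount, Runtime_GetFrameDetails, etc.:
  CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]);
  RUNTIME_ASSERT(CheckExecutionState(isolate, break_id));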
int n = 0; @@ -11227,16 +11206,12 @@ static SaveContext* FindSavedContextForFrame(Isolate* isolate, // Arguments name, value // Locals name, value // Return value if any -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) { +RUNTIME_FUNCTION(Runtime_GetFrameDetails) { HandleScope scope(isolate); ASSERT(args.length() == 2); + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); - // Check arguments. - Object* check; - { MaybeObject* maybe_check = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_check->ToObject(&check)) return maybe_check; - } CONVERT_NUMBER_CHECKED(int, index, Int32, args[1]); Heap* heap = isolate->heap(); @@ -11284,33 +11259,44 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) { ASSERT(*scope_info != ScopeInfo::Empty(isolate)); // Get the locals names and values into a temporary array. - // - // TODO(1240907): Hide compiler-introduced stack variables - // (e.g. .result)? For users of the debugger, they will probably be - // confusing. + int local_count = scope_info->LocalCount(); + for (int slot = 0; slot < scope_info->LocalCount(); ++slot) { + // Hide compiler-introduced temporary variables, whether on the stack or on + // the context. + if (scope_info->LocalIsSynthetic(slot)) + local_count--; + } + Handle<FixedArray> locals = - isolate->factory()->NewFixedArray(scope_info->LocalCount() * 2); + isolate->factory()->NewFixedArray(local_count * 2); // Fill in the values of the locals. + int local = 0; int i = 0; for (; i < scope_info->StackLocalCount(); ++i) { // Use the value from the stack. - locals->set(i * 2, scope_info->LocalName(i)); - locals->set(i * 2 + 1, frame_inspector.GetExpression(i)); + if (scope_info->LocalIsSynthetic(i)) + continue; + locals->set(local * 2, scope_info->LocalName(i)); + locals->set(local * 2 + 1, frame_inspector.GetExpression(i)); + local++; } - if (i < scope_info->LocalCount()) { + if (local < local_count) { // Get the context containing declarations. Handle<Context> context( Context::cast(it.frame()->context())->declaration_context()); for (; i < scope_info->LocalCount(); ++i) { + if (scope_info->LocalIsSynthetic(i)) + continue; Handle<String> name(scope_info->LocalName(i)); VariableMode mode; InitializationFlag init_flag; - locals->set(i * 2, *name); + locals->set(local * 2, *name); int context_slot_index = - scope_info->ContextSlotIndex(*name, &mode, &init_flag); + ScopeInfo::ContextSlotIndex(scope_info, name, &mode, &init_flag); Object* value = context->get(context_slot_index); - locals->set(i * 2 + 1, value); + locals->set(local * 2 + 1, value); + local++; } } @@ -11371,7 +11357,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) { // Calculate the size of the result. int details_size = kFrameDetailsFirstDynamicIndex + - 2 * (argument_count + scope_info->LocalCount()) + + 2 * (argument_count + local_count) + (at_return ? 1 : 0); Handle<FixedArray> details = isolate->factory()->NewFixedArray(details_size); @@ -11386,7 +11372,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) { // Add the locals count details->set(kFrameDetailsLocalCountIndex, - Smi::FromInt(scope_info->LocalCount())); + Smi::FromInt(local_count)); // Add the source position. if (position != RelocInfo::kNoPosition) { @@ -11437,7 +11423,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) { } // Add locals name and value from the temporary copy from the function frame. 
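Runtime_GetFrameDetails above also changes behavior slightly: compiler-introduced temporaries (ScopeInfo::LocalIsSynthetic, e.g. ".result") are now hidden from the debugger, so the locals array is sized by a separate local_count and filled with its own running index rather than the raw slot index. Condensed:

  int local_count = scope_info->LocalCount();
  for (int slot = 0; slot < scope_info->LocalCount(); ++slot) {
    if (scope_info->LocalIsSynthetic(slot)) local_count--;
  }

  int local = 0;
  for (int i = 0; i < scope_info->StackLocalCount(); ++i) {
    if (scope_info->LocalIsSynthetic(i)) continue;
    locals->set(local * 2, scope_info->LocalName(i));
    locals->set(local * 2 + 1, frame_inspector.GetExpression(i));
    local++;
  }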
- for (int i = 0; i < scope_info->LocalCount() * 2; i++) { + for (int i = 0; i < local_count * 2; i++) { details->set(details_index++, locals->get(i)); } @@ -11467,7 +11453,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) { ASSERT(!receiver->IsNull()); Context* context = Context::cast(it.frame()->context()); Handle<Context> native_context(Context::cast(context->native_context())); - receiver = isolate->factory()->ToObject(receiver, native_context); + receiver = Object::ToObject( + isolate, receiver, native_context).ToHandleChecked(); } } details->set(kFrameDetailsReceiverIndex, *receiver); @@ -11478,16 +11465,17 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) { static bool ParameterIsShadowedByContextLocal(Handle<ScopeInfo> info, - int index) { + Handle<String> parameter_name) { VariableMode mode; InitializationFlag flag; - return info->ContextSlotIndex(info->ParameterName(index), &mode, &flag) != -1; + return ScopeInfo::ContextSlotIndex(info, parameter_name, &mode, &flag) != -1; } // Create a plain JSObject which materializes the local scope for the specified // frame. -static Handle<JSObject> MaterializeStackLocalsWithFrameInspector( +MUST_USE_RESULT +static MaybeHandle<JSObject> MaterializeStackLocalsWithFrameInspector( Isolate* isolate, Handle<JSObject> target, Handle<JSFunction> function, @@ -11498,7 +11486,8 @@ static Handle<JSObject> MaterializeStackLocalsWithFrameInspector( // First fill all parameters. for (int i = 0; i < scope_info->ParameterCount(); ++i) { // Do not materialize the parameter if it is shadowed by a context local. - if (ParameterIsShadowedByContextLocal(scope_info, i)) continue; + Handle<String> name(scope_info->ParameterName(i)); + if (ParameterIsShadowedByContextLocal(scope_info, name)) continue; HandleScope scope(isolate); Handle<Object> value(i < frame_inspector->GetParametersCount() @@ -11506,24 +11495,24 @@ static Handle<JSObject> MaterializeStackLocalsWithFrameInspector( : isolate->heap()->undefined_value(), isolate); ASSERT(!value->IsTheHole()); - Handle<String> name(scope_info->ParameterName(i)); - RETURN_IF_EMPTY_HANDLE_VALUE( + RETURN_ON_EXCEPTION( isolate, Runtime::SetObjectProperty(isolate, target, name, value, NONE, SLOPPY), - Handle<JSObject>()); + JSObject); } // Second fill all stack locals. for (int i = 0; i < scope_info->StackLocalCount(); ++i) { + if (scope_info->LocalIsSynthetic(i)) continue; Handle<String> name(scope_info->StackLocalName(i)); Handle<Object> value(frame_inspector->GetExpression(i), isolate); if (value->IsTheHole()) continue; - RETURN_IF_EMPTY_HANDLE_VALUE( + RETURN_ON_EXCEPTION( isolate, Runtime::SetObjectProperty(isolate, target, name, value, NONE, SLOPPY), - Handle<JSObject>()); + JSObject); } return target; @@ -11548,30 +11537,34 @@ static void UpdateStackLocalsFromMaterializedObject(Isolate* isolate, // Parameters. for (int i = 0; i < scope_info->ParameterCount(); ++i) { // Shadowed parameters were not materialized. - if (ParameterIsShadowedByContextLocal(scope_info, i)) continue; + Handle<String> name(scope_info->ParameterName(i)); + if (ParameterIsShadowedByContextLocal(scope_info, name)) continue; ASSERT(!frame->GetParameter(i)->IsTheHole()); HandleScope scope(isolate); - Handle<String> name(scope_info->ParameterName(i)); - Handle<Object> value = GetProperty(isolate, target, name); + Handle<Object> value = + Object::GetPropertyOrElement(target, name).ToHandleChecked(); frame->SetParameterValue(i, *value); } // Stack locals. 
for (int i = 0; i < scope_info->StackLocalCount(); ++i) { + if (scope_info->LocalIsSynthetic(i)) continue; if (frame->GetExpression(i)->IsTheHole()) continue; HandleScope scope(isolate); - Handle<Object> value = GetProperty( - isolate, target, Handle<String>(scope_info->StackLocalName(i))); + Handle<Object> value = Object::GetPropertyOrElement( + target, + handle(scope_info->StackLocalName(i), isolate)).ToHandleChecked(); frame->SetExpression(i, *value); } } -static Handle<JSObject> MaterializeLocalContext(Isolate* isolate, - Handle<JSObject> target, - Handle<JSFunction> function, - JavaScriptFrame* frame) { +MUST_USE_RESULT static MaybeHandle<JSObject> MaterializeLocalContext( + Isolate* isolate, + Handle<JSObject> target, + Handle<JSFunction> function, + JavaScriptFrame* frame) { HandleScope scope(isolate); Handle<SharedFunctionInfo> shared(function->shared()); Handle<ScopeInfo> scope_info(shared->scope_info()); @@ -11583,7 +11576,7 @@ static Handle<JSObject> MaterializeLocalContext(Isolate* isolate, Handle<Context> function_context(frame_context->declaration_context()); if (!ScopeInfo::CopyContextLocalsToScopeObject( scope_info, function_context, target)) { - return Handle<JSObject>(); + return MaybeHandle<JSObject>(); } // Finally copy any properties from the function context extension. @@ -11592,24 +11585,24 @@ static Handle<JSObject> MaterializeLocalContext(Isolate* isolate, if (function_context->has_extension() && !function_context->IsNativeContext()) { Handle<JSObject> ext(JSObject::cast(function_context->extension())); - bool threw = false; - Handle<FixedArray> keys = - GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS, &threw); - if (threw) return Handle<JSObject>(); + Handle<FixedArray> keys; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, keys, + JSReceiver::GetKeys(ext, JSReceiver::INCLUDE_PROTOS), + JSObject); for (int i = 0; i < keys->length(); i++) { // Names of variables introduced by eval are strings. 
ASSERT(keys->get(i)->IsString()); Handle<String> key(String::cast(keys->get(i))); - RETURN_IF_EMPTY_HANDLE_VALUE( + Handle<Object> value; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, value, Object::GetPropertyOrElement(ext, key), JSObject); + RETURN_ON_EXCEPTION( isolate, - Runtime::SetObjectProperty(isolate, - target, - key, - GetProperty(isolate, ext, key), - NONE, - SLOPPY), - Handle<JSObject>()); + Runtime::SetObjectProperty( + isolate, target, key, value, NONE, SLOPPY), + JSObject); } } } @@ -11618,7 +11611,7 @@ static Handle<JSObject> MaterializeLocalContext(Isolate* isolate, } -static Handle<JSObject> MaterializeLocalScope( +MUST_USE_RESULT static MaybeHandle<JSObject> MaterializeLocalScope( Isolate* isolate, JavaScriptFrame* frame, int inlined_jsframe_index) { @@ -11627,9 +11620,11 @@ static Handle<JSObject> MaterializeLocalScope( Handle<JSObject> local_scope = isolate->factory()->NewJSObject(isolate->object_function()); - local_scope = MaterializeStackLocalsWithFrameInspector( - isolate, local_scope, function, &frame_inspector); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, local_scope, Handle<JSObject>()); + ASSIGN_RETURN_ON_EXCEPTION( + isolate, local_scope, + MaterializeStackLocalsWithFrameInspector( + isolate, local_scope, function, &frame_inspector), + JSObject); return MaterializeLocalContext(isolate, local_scope, function, frame); } @@ -11643,11 +11638,11 @@ static bool SetContextLocalValue(Isolate* isolate, Handle<Object> new_value) { for (int i = 0; i < scope_info->ContextLocalCount(); i++) { Handle<String> next_name(scope_info->ContextLocalName(i)); - if (variable_name->Equals(*next_name)) { + if (String::Equals(variable_name, next_name)) { VariableMode mode; InitializationFlag init_flag; int context_index = - scope_info->ContextSlotIndex(*next_name, &mode, &init_flag); + ScopeInfo::ContextSlotIndex(scope_info, next_name, &mode, &init_flag); context->set(context_index, *new_value); return true; } @@ -11675,7 +11670,8 @@ static bool SetLocalVariableValue(Isolate* isolate, // Parameters. for (int i = 0; i < scope_info->ParameterCount(); ++i) { - if (scope_info->ParameterName(i)->Equals(*variable_name)) { + HandleScope scope(isolate); + if (String::Equals(handle(scope_info->ParameterName(i)), variable_name)) { frame->SetParameterValue(i, *new_value); // Argument might be shadowed in heap context, don't stop here. default_result = true; @@ -11684,7 +11680,8 @@ static bool SetLocalVariableValue(Isolate* isolate, // Stack locals. for (int i = 0; i < scope_info->StackLocalCount(); ++i) { - if (scope_info->StackLocalName(i)->Equals(*variable_name)) { + HandleScope scope(isolate); + if (String::Equals(handle(scope_info->StackLocalName(i)), variable_name)) { frame->SetExpression(i, *new_value); return true; } @@ -11709,7 +11706,7 @@ static bool SetLocalVariableValue(Isolate* isolate, // We don't expect this to do anything except replacing // property value. Runtime::SetObjectProperty(isolate, ext, variable_name, new_value, - NONE, SLOPPY); + NONE, SLOPPY).Assert(); return true; } } @@ -11722,8 +11719,9 @@ static bool SetLocalVariableValue(Isolate* isolate, // Create a plain JSObject which materializes the closure content for the // context. 
-static Handle<JSObject> MaterializeClosure(Isolate* isolate, - Handle<Context> context) { +MUST_USE_RESULT static MaybeHandle<JSObject> MaterializeClosure( + Isolate* isolate, + Handle<Context> context) { ASSERT(context->IsFunctionContext()); Handle<SharedFunctionInfo> shared(context->closure()->shared()); @@ -11737,28 +11735,31 @@ static Handle<JSObject> MaterializeClosure(Isolate* isolate, // Fill all context locals to the context extension. if (!ScopeInfo::CopyContextLocalsToScopeObject( scope_info, context, closure_scope)) { - return Handle<JSObject>(); + return MaybeHandle<JSObject>(); } // Finally copy any properties from the function context extension. This will // be variables introduced by eval. if (context->has_extension()) { Handle<JSObject> ext(JSObject::cast(context->extension())); - bool threw = false; - Handle<FixedArray> keys = - GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS, &threw); - if (threw) return Handle<JSObject>(); + Handle<FixedArray> keys; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, keys, + JSReceiver::GetKeys(ext, JSReceiver::INCLUDE_PROTOS), JSObject); for (int i = 0; i < keys->length(); i++) { + HandleScope scope(isolate); // Names of variables introduced by eval are strings. ASSERT(keys->get(i)->IsString()); Handle<String> key(String::cast(keys->get(i))); - RETURN_IF_EMPTY_HANDLE_VALUE( + Handle<Object> value; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, value, Object::GetPropertyOrElement(ext, key), JSObject); + RETURN_ON_EXCEPTION( isolate, - Runtime::SetObjectProperty(isolate, closure_scope, key, - GetProperty(isolate, ext, key), - NONE, SLOPPY), - Handle<JSObject>()); + Runtime::SetObjectProperty( + isolate, closure_scope, key, value, NONE, SLOPPY), + JSObject); } } @@ -11789,7 +11790,7 @@ static bool SetClosureVariableValue(Isolate* isolate, if (JSReceiver::HasProperty(ext, variable_name)) { // We don't expect this to do anything except replacing property value. Runtime::SetObjectProperty(isolate, ext, variable_name, new_value, - NONE, SLOPPY); + NONE, SLOPPY).Assert(); return true; } } @@ -11800,19 +11801,20 @@ static bool SetClosureVariableValue(Isolate* isolate, // Create a plain JSObject which materializes the scope for the specified // catch context. -static Handle<JSObject> MaterializeCatchScope(Isolate* isolate, - Handle<Context> context) { +MUST_USE_RESULT static MaybeHandle<JSObject> MaterializeCatchScope( + Isolate* isolate, + Handle<Context> context) { ASSERT(context->IsCatchContext()); Handle<String> name(String::cast(context->extension())); Handle<Object> thrown_object(context->get(Context::THROWN_OBJECT_INDEX), isolate); Handle<JSObject> catch_scope = isolate->factory()->NewJSObject(isolate->object_function()); - RETURN_IF_EMPTY_HANDLE_VALUE( + RETURN_ON_EXCEPTION( isolate, Runtime::SetObjectProperty(isolate, catch_scope, name, thrown_object, NONE, SLOPPY), - Handle<JSObject>()); + JSObject); return catch_scope; } @@ -11823,7 +11825,7 @@ static bool SetCatchVariableValue(Isolate* isolate, Handle<Object> new_value) { ASSERT(context->IsCatchContext()); Handle<String> name(String::cast(context->extension())); - if (!name->Equals(*variable_name)) { + if (!String::Equals(name, variable_name)) { return false; } context->set(Context::THROWN_OBJECT_INDEX, *new_value); @@ -11833,7 +11835,7 @@ static bool SetCatchVariableValue(Isolate* isolate, // Create a plain JSObject which materializes the block scope for the specified // block context. 
-static Handle<JSObject> MaterializeBlockScope( +MUST_USE_RESULT static MaybeHandle<JSObject> MaterializeBlockScope( Isolate* isolate, Handle<Context> context) { ASSERT(context->IsBlockContext()); @@ -11847,7 +11849,7 @@ static Handle<JSObject> MaterializeBlockScope( // Fill all context locals. if (!ScopeInfo::CopyContextLocalsToScopeObject( scope_info, context, block_scope)) { - return Handle<JSObject>(); + return MaybeHandle<JSObject>(); } return block_scope; @@ -11856,7 +11858,7 @@ static Handle<JSObject> MaterializeBlockScope( // Create a plain JSObject which materializes the module scope for the specified // module context. -static Handle<JSObject> MaterializeModuleScope( +MUST_USE_RESULT static MaybeHandle<JSObject> MaterializeModuleScope( Isolate* isolate, Handle<Context> context) { ASSERT(context->IsModuleContext()); @@ -11870,7 +11872,7 @@ static Handle<JSObject> MaterializeModuleScope( // Fill all context locals. if (!ScopeInfo::CopyContextLocalsToScopeObject( scope_info, context, module_scope)) { - return Handle<JSObject>(); + return MaybeHandle<JSObject>(); } return module_scope; @@ -12080,7 +12082,7 @@ class ScopeIterator { } // Return the JavaScript object with the content of the current scope. - Handle<JSObject> ScopeObject() { + MaybeHandle<JSObject> ScopeObject() { ASSERT(!failed_); switch (Type()) { case ScopeIterator::ScopeTypeGlobal: @@ -12243,16 +12245,12 @@ class ScopeIterator { }; -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScopeCount) { +RUNTIME_FUNCTION(Runtime_GetScopeCount) { HandleScope scope(isolate); ASSERT(args.length() == 2); + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); - // Check arguments. - Object* check; - { MaybeObject* maybe_check = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_check->ToObject(&check)) return maybe_check; - } CONVERT_SMI_ARG_CHECKED(wrapped_id, 1); // Get the frame where the debugging is performed. @@ -12275,16 +12273,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScopeCount) { // Returns the list of step-in positions (text offset) in a function of the // stack frame in a range from the current debug break position to the end // of the corresponding statement. -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetStepInPositions) { +RUNTIME_FUNCTION(Runtime_GetStepInPositions) { HandleScope scope(isolate); ASSERT(args.length() == 2); + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); - // Check arguments. - Object* check; - { MaybeObject* maybe_check = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_check->ToObject(&check)) return maybe_check; - } CONVERT_SMI_ARG_CHECKED(wrapped_id, 1); // Get the frame where the debugging is performed. 
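The hunks above all apply one pattern that recurs through the rest of this file: the debug scope materializers stop signalling failure with an empty Handle<JSObject> and instead return a MUST_USE_RESULT MaybeHandle<JSObject>, while the runtime entry points drop the MaybeObject* return type and validate the break id up front with CheckExecutionState. A minimal sketch of that shape, using only macros that appear in these hunks (CONVERT_NUMBER_CHECKED, CONVERT_ARG_HANDLE_CHECKED, RUNTIME_ASSERT, RETURN_ON_EXCEPTION, ASSIGN_RETURN_FAILURE_ON_EXCEPTION); MaterializeExampleScope and Runtime_GetExampleDetails are illustrative names, not functions added by this patch:

// Failure now propagates as an empty MaybeHandle instead of an empty
// Handle that every caller had to test with RETURN_IF_EMPTY_HANDLE.
MUST_USE_RESULT static MaybeHandle<JSObject> MaterializeExampleScope(
    Isolate* isolate,
    Handle<JSObject> target,
    Handle<String> name,
    Handle<Object> value) {
  // If SetObjectProperty throws, return an empty MaybeHandle<JSObject>.
  RETURN_ON_EXCEPTION(
      isolate,
      Runtime::SetObjectProperty(isolate, target, name, value, NONE, SLOPPY),
      JSObject);
  return target;
}

RUNTIME_FUNCTION(Runtime_GetExampleDetails) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 4);
  // Replaces the old inline call to Runtime_CheckExecutionState.
  CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]);
  RUNTIME_ASSERT(CheckExecutionState(isolate, break_id));
  CONVERT_ARG_HANDLE_CHECKED(JSObject, target, 1);
  CONVERT_ARG_HANDLE_CHECKED(String, name, 2);
  CONVERT_ARG_HANDLE_CHECKED(Object, value, 3);

  // Unwrap the MaybeHandle or return the failure sentinel with the
  // exception left pending on the isolate.
  Handle<JSObject> details;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, details,
      MaterializeExampleScope(isolate, target, name, value));
  return *details;
}

The type argument to RETURN_ON_EXCEPTION (JSObject above) names the MaybeHandle type the enclosing helper returns, so the same macro serves any MaybeHandle<T>-returning function in these hunks.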
@@ -12335,9 +12329,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetStepInPositions) { if (accept) { if (break_location_iterator.IsStepInLocation(isolate)) { Smi* position_value = Smi::FromInt(break_location_iterator.position()); - JSObject::SetElement(array, len, - Handle<Object>(position_value, isolate), - NONE, SLOPPY); + RETURN_FAILURE_ON_EXCEPTION( + isolate, + JSObject::SetElement(array, len, + Handle<Object>(position_value, isolate), + NONE, SLOPPY)); len++; } } @@ -12357,7 +12353,8 @@ static const int kScopeDetailsObjectIndex = 1; static const int kScopeDetailsSize = 2; -static Handle<JSObject> MaterializeScopeDetails(Isolate* isolate, +MUST_USE_RESULT static MaybeHandle<JSObject> MaterializeScopeDetails( + Isolate* isolate, ScopeIterator* it) { // Calculate the size of the result. int details_size = kScopeDetailsSize; @@ -12365,8 +12362,9 @@ static Handle<JSObject> MaterializeScopeDetails(Isolate* isolate, // Fill in scope details. details->set(kScopeDetailsTypeIndex, Smi::FromInt(it->Type())); - Handle<JSObject> scope_object = it->ScopeObject(); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, scope_object, Handle<JSObject>()); + Handle<JSObject> scope_object; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, scope_object, it->ScopeObject(), JSObject); details->set(kScopeDetailsObjectIndex, *scope_object); return isolate->factory()->NewJSArrayWithElements(details); @@ -12382,16 +12380,12 @@ static Handle<JSObject> MaterializeScopeDetails(Isolate* isolate, // The array returned contains the following information: // 0: Scope type // 1: Scope object -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScopeDetails) { +RUNTIME_FUNCTION(Runtime_GetScopeDetails) { HandleScope scope(isolate); ASSERT(args.length() == 4); + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); - // Check arguments. - Object* check; - { MaybeObject* maybe_check = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_check->ToObject(&check)) return maybe_check; - } CONVERT_SMI_ARG_CHECKED(wrapped_id, 1); CONVERT_NUMBER_CHECKED(int, inlined_jsframe_index, Int32, args[2]); CONVERT_NUMBER_CHECKED(int, index, Int32, args[3]); @@ -12410,8 +12404,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScopeDetails) { if (it.Done()) { return isolate->heap()->undefined_value(); } - Handle<JSObject> details = MaterializeScopeDetails(isolate, &it); - RETURN_IF_EMPTY_HANDLE(isolate, details); + Handle<JSObject> details; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, details, MaterializeScopeDetails(isolate, &it)); return *details; } @@ -12425,16 +12420,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScopeDetails) { // The array returned contains arrays with the following information: // 0: Scope type // 1: Scope object -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetAllScopesDetails) { +RUNTIME_FUNCTION(Runtime_GetAllScopesDetails) { HandleScope scope(isolate); ASSERT(args.length() == 3 || args.length() == 4); + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); - // Check arguments. 
- Object* check; - { MaybeObject* maybe_check = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_check->ToObject(&check)) return maybe_check; - } CONVERT_SMI_ARG_CHECKED(wrapped_id, 1); CONVERT_NUMBER_CHECKED(int, inlined_jsframe_index, Int32, args[2]); @@ -12452,8 +12443,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetAllScopesDetails) { List<Handle<JSObject> > result(4); ScopeIterator it(isolate, frame, inlined_jsframe_index, ignore_nested_scopes); for (; !it.Done(); it.Next()) { - Handle<JSObject> details = MaterializeScopeDetails(isolate, &it); - RETURN_IF_EMPTY_HANDLE(isolate, details); + Handle<JSObject> details; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, details, MaterializeScopeDetails(isolate, &it)); result.Add(details); } @@ -12465,7 +12457,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetAllScopesDetails) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionScopeCount) { +RUNTIME_FUNCTION(Runtime_GetFunctionScopeCount) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -12482,7 +12474,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionScopeCount) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionScopeDetails) { +RUNTIME_FUNCTION(Runtime_GetFunctionScopeDetails) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -12500,8 +12492,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionScopeDetails) { return isolate->heap()->undefined_value(); } - Handle<JSObject> details = MaterializeScopeDetails(isolate, &it); - RETURN_IF_EMPTY_HANDLE(isolate, details); + Handle<JSObject> details; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, details, MaterializeScopeDetails(isolate, &it)); return *details; } @@ -12528,22 +12521,20 @@ static bool SetScopeVariableValue(ScopeIterator* it, int index, // args[5]: object: new value // // Return true if success and false otherwise -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetScopeVariableValue) { +RUNTIME_FUNCTION(Runtime_SetScopeVariableValue) { HandleScope scope(isolate); ASSERT(args.length() == 6); // Check arguments. CONVERT_NUMBER_CHECKED(int, index, Int32, args[3]); CONVERT_ARG_HANDLE_CHECKED(String, variable_name, 4); - Handle<Object> new_value = args.at<Object>(5); + CONVERT_ARG_HANDLE_CHECKED(Object, new_value, 5); bool res; if (args[0]->IsNumber()) { - Object* check; - { MaybeObject* maybe_check = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_check->ToObject(&check)) return maybe_check; - } + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); + CONVERT_SMI_ARG_CHECKED(wrapped_id, 1); CONVERT_NUMBER_CHECKED(int, inlined_jsframe_index, Int32, args[2]); @@ -12564,7 +12555,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetScopeVariableValue) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPrintScopes) { +RUNTIME_FUNCTION(Runtime_DebugPrintScopes) { HandleScope scope(isolate); ASSERT(args.length() == 0); @@ -12582,16 +12573,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPrintScopes) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetThreadCount) { +RUNTIME_FUNCTION(Runtime_GetThreadCount) { HandleScope scope(isolate); ASSERT(args.length() == 1); - - // Check arguments. - Object* result; - { MaybeObject* maybe_result = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_result->ToObject(&result)) return maybe_result; - } + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); // Count all archived V8 threads. 
int n = 0; @@ -12618,16 +12604,12 @@ static const int kThreadDetailsSize = 2; // The array returned contains the following information: // 0: Is current thread? // 1: Thread id -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetThreadDetails) { +RUNTIME_FUNCTION(Runtime_GetThreadDetails) { HandleScope scope(isolate); ASSERT(args.length() == 2); + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); - // Check arguments. - Object* check; - { MaybeObject* maybe_check = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_check->ToObject(&check)) return maybe_check; - } CONVERT_NUMBER_CHECKED(int, index, Int32, args[1]); // Allocate array for result. @@ -12668,7 +12650,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetThreadDetails) { // Sets the disable break state // args[0]: disable break state -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetDisableBreak) { +RUNTIME_FUNCTION(Runtime_SetDisableBreak) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_BOOLEAN_ARG_CHECKED(disable_break, 0); @@ -12682,7 +12664,7 @@ static bool IsPositionAlignmentCodeCorrect(int alignment) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetBreakLocations) { +RUNTIME_FUNCTION(Runtime_GetBreakLocations) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -12710,13 +12692,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetBreakLocations) { // args[0]: function // args[1]: number: break source position (within the function source) // args[2]: number: break point object -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetFunctionBreakPoint) { +RUNTIME_FUNCTION(Runtime_SetFunctionBreakPoint) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); CONVERT_NUMBER_CHECKED(int32_t, source_position, Int32, args[1]); RUNTIME_ASSERT(source_position >= 0); - Handle<Object> break_point_object_arg = args.at<Object>(2); + CONVERT_ARG_HANDLE_CHECKED(Object, break_point_object_arg, 2); // Set break point. isolate->debug()->SetBreakPoint(function, break_point_object_arg, @@ -12733,14 +12715,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetFunctionBreakPoint) { // args[1]: number: break source position (within the script source) // args[2]: number, breakpoint position alignment // args[3]: number: break point object -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetScriptBreakPoint) { +RUNTIME_FUNCTION(Runtime_SetScriptBreakPoint) { HandleScope scope(isolate); ASSERT(args.length() == 4); CONVERT_ARG_HANDLE_CHECKED(JSValue, wrapper, 0); CONVERT_NUMBER_CHECKED(int32_t, source_position, Int32, args[1]); RUNTIME_ASSERT(source_position >= 0); CONVERT_NUMBER_CHECKED(int32_t, statement_aligned_code, Int32, args[2]); - Handle<Object> break_point_object_arg = args.at<Object>(3); + CONVERT_ARG_HANDLE_CHECKED(Object, break_point_object_arg, 3); if (!IsPositionAlignmentCodeCorrect(statement_aligned_code)) { return isolate->ThrowIllegalOperation(); @@ -12765,10 +12747,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetScriptBreakPoint) { // Clear a break point // args[0]: number: break point object -RUNTIME_FUNCTION(MaybeObject*, Runtime_ClearBreakPoint) { +RUNTIME_FUNCTION(Runtime_ClearBreakPoint) { HandleScope scope(isolate); ASSERT(args.length() == 1); - Handle<Object> break_point_object_arg = args.at<Object>(0); + CONVERT_ARG_HANDLE_CHECKED(Object, break_point_object_arg, 0); // Clear break point. 
isolate->debug()->ClearBreakPoint(break_point_object_arg); @@ -12780,16 +12762,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ClearBreakPoint) { // Change the state of break on exceptions. // args[0]: Enum value indicating whether to affect caught/uncaught exceptions. // args[1]: Boolean indicating on/off. -RUNTIME_FUNCTION(MaybeObject*, Runtime_ChangeBreakOnException) { +RUNTIME_FUNCTION(Runtime_ChangeBreakOnException) { HandleScope scope(isolate); ASSERT(args.length() == 2); - RUNTIME_ASSERT(args[0]->IsNumber()); + CONVERT_NUMBER_CHECKED(uint32_t, type_arg, Uint32, args[0]); CONVERT_BOOLEAN_ARG_CHECKED(enable, 1); // If the number doesn't match an enum value, the ChangeBreakOnException // function will default to affecting caught exceptions. - ExceptionBreakType type = - static_cast<ExceptionBreakType>(NumberToUint32(args[0])); + ExceptionBreakType type = static_cast<ExceptionBreakType>(type_arg); // Update break point state. isolate->debug()->ChangeBreakOnException(type, enable); return isolate->heap()->undefined_value(); @@ -12798,13 +12779,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ChangeBreakOnException) { // Returns the state of break on exceptions // args[0]: boolean indicating uncaught exceptions -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsBreakOnException) { +RUNTIME_FUNCTION(Runtime_IsBreakOnException) { HandleScope scope(isolate); ASSERT(args.length() == 1); - RUNTIME_ASSERT(args[0]->IsNumber()); + CONVERT_NUMBER_CHECKED(uint32_t, type_arg, Uint32, args[0]); - ExceptionBreakType type = - static_cast<ExceptionBreakType>(NumberToUint32(args[0])); + ExceptionBreakType type = static_cast<ExceptionBreakType>(type_arg); bool result = isolate->debug()->IsBreakOnException(type); return Smi::FromInt(result); } @@ -12815,15 +12795,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsBreakOnException) { // args[1]: step action from the enumeration StepAction // args[2]: number of times to perform the step, for step out it is the number // of frames to step down. -RUNTIME_FUNCTION(MaybeObject*, Runtime_PrepareStep) { +RUNTIME_FUNCTION(Runtime_PrepareStep) { HandleScope scope(isolate); ASSERT(args.length() == 4); - // Check arguments. - Object* check; - { MaybeObject* maybe_check = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_check->ToObject(&check)) return maybe_check; - } + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); + if (!args[1]->IsNumber() || !args[2]->IsNumber()) { return isolate->Throw(isolate->heap()->illegal_argument_string()); } @@ -12870,7 +12847,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PrepareStep) { // Clear all stepping set by PrepareStep. -RUNTIME_FUNCTION(MaybeObject*, Runtime_ClearStepping) { +RUNTIME_FUNCTION(Runtime_ClearStepping) { HandleScope scope(isolate); ASSERT(args.length() == 0); isolate->debug()->ClearStepping(); @@ -12880,7 +12857,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ClearStepping) { // Helper function to find or create the arguments object for // Runtime_DebugEvaluate. -static Handle<JSObject> MaterializeArgumentsObject( +MUST_USE_RESULT static MaybeHandle<JSObject> MaterializeArgumentsObject( Isolate* isolate, Handle<JSObject> target, Handle<JSFunction> function) { @@ -12895,40 +12872,43 @@ static Handle<JSObject> MaterializeArgumentsObject( // FunctionGetArguments can't throw an exception. 
Handle<JSObject> arguments = Handle<JSObject>::cast( Accessors::FunctionGetArguments(function)); - Runtime::SetObjectProperty(isolate, target, - isolate->factory()->arguments_string(), - arguments, - ::NONE, - SLOPPY); + Handle<String> arguments_str = isolate->factory()->arguments_string(); + RETURN_ON_EXCEPTION( + isolate, + Runtime::SetObjectProperty( + isolate, target, arguments_str, arguments, ::NONE, SLOPPY), + JSObject); return target; } // Compile and evaluate source for the given context. -static MaybeObject* DebugEvaluate(Isolate* isolate, - Handle<Context> context, - Handle<Object> context_extension, - Handle<Object> receiver, - Handle<String> source) { +static MaybeHandle<Object> DebugEvaluate(Isolate* isolate, + Handle<Context> context, + Handle<Object> context_extension, + Handle<Object> receiver, + Handle<String> source) { if (context_extension->IsJSObject()) { Handle<JSObject> extension = Handle<JSObject>::cast(context_extension); Handle<JSFunction> closure(context->closure(), isolate); context = isolate->factory()->NewWithContext(closure, context, extension); } - Handle<JSFunction> eval_fun = + Handle<JSFunction> eval_fun; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, eval_fun, Compiler::GetFunctionFromEval(source, context, SLOPPY, NO_PARSE_RESTRICTION, - RelocInfo::kNoPosition); - RETURN_IF_EMPTY_HANDLE(isolate, eval_fun); + RelocInfo::kNoPosition), + Object); - bool pending_exception; - Handle<Object> result = Execution::Call( - isolate, eval_fun, receiver, 0, NULL, &pending_exception); - - if (pending_exception) return Failure::Exception(); + Handle<Object> result; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, result, + Execution::Call(isolate, eval_fun, receiver, 0, NULL), + Object); // Skip the global proxy as it has no properties and always delegates to the // real global object. @@ -12938,7 +12918,7 @@ static MaybeObject* DebugEvaluate(Isolate* isolate, // Clear the oneshot breakpoints so that the debugger does not step further. isolate->debug()->ClearStepping(); - return *result; + return result; } @@ -12947,22 +12927,20 @@ static MaybeObject* DebugEvaluate(Isolate* isolate, // - Parameters and stack-allocated locals need to be materialized. Altered // values need to be written back to the stack afterwards. // - The arguments object needs to materialized. -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) { +RUNTIME_FUNCTION(Runtime_DebugEvaluate) { HandleScope scope(isolate); // Check the execution state and decode arguments frame and source to be // evaluated. ASSERT(args.length() == 6); - Object* check_result; - { MaybeObject* maybe_result = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_result->ToObject(&check_result)) return maybe_result; - } + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); + CONVERT_SMI_ARG_CHECKED(wrapped_id, 1); CONVERT_NUMBER_CHECKED(int, inlined_jsframe_index, Int32, args[2]); CONVERT_ARG_HANDLE_CHECKED(String, source, 3); CONVERT_BOOLEAN_ARG_CHECKED(disable_break, 4); - Handle<Object> context_extension(args[5], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, context_extension, 5); // Handle the processing of break. 
DisableBreak disable_break_save(isolate, disable_break); @@ -12989,24 +12967,23 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) { Handle<JSObject> materialized = isolate->factory()->NewJSObject(isolate->object_function()); - materialized = MaterializeStackLocalsWithFrameInspector( - isolate, materialized, function, &frame_inspector); - RETURN_IF_EMPTY_HANDLE(isolate, materialized); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, materialized, + MaterializeStackLocalsWithFrameInspector( + isolate, materialized, function, &frame_inspector)); - materialized = MaterializeArgumentsObject(isolate, materialized, function); - RETURN_IF_EMPTY_HANDLE(isolate, materialized); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, materialized, + MaterializeArgumentsObject(isolate, materialized, function)); // Add the materialized object in a with-scope to shadow the stack locals. context = isolate->factory()->NewWithContext(function, context, materialized); Handle<Object> receiver(frame->receiver(), isolate); - Object* evaluate_result_object; - { MaybeObject* maybe_result = - DebugEvaluate(isolate, context, context_extension, receiver, source); - if (!maybe_result->ToObject(&evaluate_result_object)) return maybe_result; - } - - Handle<Object> result(evaluate_result_object, isolate); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + DebugEvaluate(isolate, context, context_extension, receiver, source)); // Write back potential changes to materialized stack locals to the stack. UpdateStackLocalsFromMaterializedObject( @@ -13016,20 +12993,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluateGlobal) { +RUNTIME_FUNCTION(Runtime_DebugEvaluateGlobal) { HandleScope scope(isolate); // Check the execution state and decode arguments frame and source to be // evaluated. ASSERT(args.length() == 4); - Object* check_result; - { MaybeObject* maybe_result = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_result->ToObject(&check_result)) return maybe_result; - } + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); + CONVERT_ARG_HANDLE_CHECKED(String, source, 1); CONVERT_BOOLEAN_ARG_CHECKED(disable_break, 2); - Handle<Object> context_extension(args[3], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, context_extension, 3); // Handle the processing of break. DisableBreak disable_break_save(isolate, disable_break); @@ -13048,11 +13023,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluateGlobal) { // debugger was invoked. Handle<Context> context = isolate->native_context(); Handle<Object> receiver = isolate->global_object(); - return DebugEvaluate(isolate, context, context_extension, receiver, source); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + DebugEvaluate(isolate, context, context_extension, receiver, source)); + return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetLoadedScripts) { +RUNTIME_FUNCTION(Runtime_DebugGetLoadedScripts) { HandleScope scope(isolate); ASSERT(args.length() == 0); @@ -13067,7 +13046,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetLoadedScripts) { // instances->set(i, *GetScriptWrapper(script)) // is unsafe as GetScriptWrapper might call GC and the C++ compiler might // already have dereferenced the instances handle. 
- Handle<JSValue> wrapper = GetScriptWrapper(script); + Handle<JSObject> wrapper = Script::GetWrapper(script); instances->set(i, *wrapper); } @@ -13154,7 +13133,7 @@ static int DebugReferencedBy(HeapIterator* iterator, // args[0]: the object to find references to // args[1]: constructor function for instances to exclude (Mirror) // args[2]: the the maximum number of objects to return -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugReferencedBy) { +RUNTIME_FUNCTION(Runtime_DebugReferencedBy) { HandleScope scope(isolate); ASSERT(args.length() == 3); @@ -13168,7 +13147,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugReferencedBy) { // Check parameters. CONVERT_ARG_HANDLE_CHECKED(JSObject, target, 0); - Handle<Object> instance_filter = args.at<Object>(1); + CONVERT_ARG_HANDLE_CHECKED(Object, instance_filter, 1); RUNTIME_ASSERT(instance_filter->IsUndefined() || instance_filter->IsJSObject()); CONVERT_NUMBER_CHECKED(int32_t, max_references, Int32, args[2]); @@ -13244,7 +13223,7 @@ static int DebugConstructedBy(HeapIterator* iterator, // Scan the heap for objects constructed by a specific function. // args[0]: the constructor to find instances of // args[1]: the the maximum number of objects to return -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugConstructedBy) { +RUNTIME_FUNCTION(Runtime_DebugConstructedBy) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -13289,16 +13268,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugConstructedBy) { // Find the effective prototype object as returned by __proto__. // args[0]: the object to find the prototype for. -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPrototype) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_DebugGetPrototype) { + HandleScope shs(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(JSObject, obj, 0); - return GetPrototypeSkipHiddenPrototypes(isolate, obj); + CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0); + return *GetPrototypeSkipHiddenPrototypes(isolate, obj); } // Patches script source (should be called upon BeforeCompile event). -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugSetScriptSource) { +RUNTIME_FUNCTION(Runtime_DebugSetScriptSource) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -13316,7 +13295,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugSetScriptSource) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SystemBreak) { +RUNTIME_FUNCTION(Runtime_SystemBreak) { SealHandleScope shs(isolate); ASSERT(args.length() == 0); OS::DebugBreak(); @@ -13324,14 +13303,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SystemBreak) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleFunction) { +RUNTIME_FUNCTION(Runtime_DebugDisassembleFunction) { HandleScope scope(isolate); #ifdef DEBUG ASSERT(args.length() == 1); // Get the function and make sure it is compiled. CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0); if (!Compiler::EnsureCompiled(func, KEEP_EXCEPTION)) { - return Failure::Exception(); + return isolate->heap()->exception(); } func->code()->PrintLn(); #endif // DEBUG @@ -13339,14 +13318,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleFunction) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleConstructor) { +RUNTIME_FUNCTION(Runtime_DebugDisassembleConstructor) { HandleScope scope(isolate); #ifdef DEBUG ASSERT(args.length() == 1); // Get the function and make sure it is compiled. 
CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0); if (!Compiler::EnsureCompiled(func, KEEP_EXCEPTION)) { - return Failure::Exception(); + return isolate->heap()->exception(); } func->shared()->construct_stub()->PrintLn(); #endif // DEBUG @@ -13354,7 +13333,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleConstructor) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetInferredName) { +RUNTIME_FUNCTION(Runtime_FunctionGetInferredName) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); @@ -13392,8 +13371,7 @@ static int FindSharedFunctionInfosForScript(HeapIterator* iterator, // For a script finds all SharedFunctionInfo's in the heap that points // to this script. Returns JSArray of SharedFunctionInfo wrapped // in OpaqueReferences. -RUNTIME_FUNCTION(MaybeObject*, - Runtime_LiveEditFindSharedFunctionInfosForScript) { +RUNTIME_FUNCTION(Runtime_LiveEditFindSharedFunctionInfosForScript) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 1); @@ -13442,7 +13420,7 @@ RUNTIME_FUNCTION(MaybeObject*, // Returns a JSArray of compilation infos. The array is ordered so that // each function with all its descendant is always stored in a continues range // with the function itself going first. The root function is a script function. -RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditGatherCompileInfo) { +RUNTIME_FUNCTION(Runtime_LiveEditGatherCompileInfo) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 2); @@ -13452,71 +13430,72 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditGatherCompileInfo) { RUNTIME_ASSERT(script->value()->IsScript()); Handle<Script> script_handle = Handle<Script>(Script::cast(script->value())); - JSArray* result = LiveEdit::GatherCompileInfo(script_handle, source); - - if (isolate->has_pending_exception()) { - return Failure::Exception(); - } - - return result; + Handle<JSArray> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, LiveEdit::GatherCompileInfo(script_handle, source)); + return *result; } // Changes the source of the script to a new_source. // If old_script_name is provided (i.e. is a String), also creates a copy of // the script with its original source and sends notification to debugger. 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceScript) { +RUNTIME_FUNCTION(Runtime_LiveEditReplaceScript) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 3); CONVERT_ARG_CHECKED(JSValue, original_script_value, 0); CONVERT_ARG_HANDLE_CHECKED(String, new_source, 1); - Handle<Object> old_script_name(args[2], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, old_script_name, 2); RUNTIME_ASSERT(original_script_value->value()->IsScript()); Handle<Script> original_script(Script::cast(original_script_value->value())); - Object* old_script = LiveEdit::ChangeScriptSource(original_script, - new_source, - old_script_name); + Handle<Object> old_script = LiveEdit::ChangeScriptSource( + original_script, new_source, old_script_name); if (old_script->IsScript()) { - Handle<Script> script_handle(Script::cast(old_script)); - return *(GetScriptWrapper(script_handle)); + Handle<Script> script_handle = Handle<Script>::cast(old_script); + return *Script::GetWrapper(script_handle); } else { return isolate->heap()->null_value(); } } -RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditFunctionSourceUpdated) { +RUNTIME_FUNCTION(Runtime_LiveEditFunctionSourceUpdated) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_info, 0); - return LiveEdit::FunctionSourceUpdated(shared_info); + RUNTIME_ASSERT(SharedInfoWrapper::IsInstance(shared_info)); + + LiveEdit::FunctionSourceUpdated(shared_info); + return isolate->heap()->undefined_value(); } // Replaces code of SharedFunctionInfo with a new one. -RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceFunctionCode) { +RUNTIME_FUNCTION(Runtime_LiveEditReplaceFunctionCode) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSArray, new_compile_info, 0); CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_info, 1); + RUNTIME_ASSERT(SharedInfoWrapper::IsInstance(shared_info)); - return LiveEdit::ReplaceFunctionCode(new_compile_info, shared_info); + LiveEdit::ReplaceFunctionCode(new_compile_info, shared_info); + return isolate->heap()->undefined_value(); } // Connects SharedFunctionInfo to another script. -RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditFunctionSetScript) { +RUNTIME_FUNCTION(Runtime_LiveEditFunctionSetScript) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 2); - Handle<Object> function_object(args[0], isolate); - Handle<Object> script_object(args[1], isolate); + CONVERT_ARG_HANDLE_CHECKED(Object, function_object, 0); + CONVERT_ARG_HANDLE_CHECKED(Object, script_object, 1); if (function_object->IsJSValue()) { Handle<JSValue> function_wrapper = Handle<JSValue>::cast(function_object); @@ -13538,7 +13517,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditFunctionSetScript) { // In a code of a parent function replaces original function as embedded object // with a substitution one. 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceRefToNestedFunction) { +RUNTIME_FUNCTION(Runtime_LiveEditReplaceRefToNestedFunction) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 3); @@ -13546,10 +13525,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceRefToNestedFunction) { CONVERT_ARG_HANDLE_CHECKED(JSValue, parent_wrapper, 0); CONVERT_ARG_HANDLE_CHECKED(JSValue, orig_wrapper, 1); CONVERT_ARG_HANDLE_CHECKED(JSValue, subst_wrapper, 2); + RUNTIME_ASSERT(parent_wrapper->value()->IsSharedFunctionInfo()); + RUNTIME_ASSERT(orig_wrapper->value()->IsSharedFunctionInfo()); + RUNTIME_ASSERT(subst_wrapper->value()->IsSharedFunctionInfo()); - LiveEdit::ReplaceRefToNestedFunction(parent_wrapper, orig_wrapper, - subst_wrapper); - + LiveEdit::ReplaceRefToNestedFunction( + parent_wrapper, orig_wrapper, subst_wrapper); return isolate->heap()->undefined_value(); } @@ -13559,14 +13540,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceRefToNestedFunction) { // array of groups of 3 numbers: // (change_begin, change_end, change_end_new_position). // Each group describes a change in text; groups are sorted by change_begin. -RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditPatchFunctionPositions) { +RUNTIME_FUNCTION(Runtime_LiveEditPatchFunctionPositions) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_array, 0); CONVERT_ARG_HANDLE_CHECKED(JSArray, position_change_array, 1); + RUNTIME_ASSERT(SharedInfoWrapper::IsInstance(shared_array)) - return LiveEdit::PatchFunctionPositions(shared_array, position_change_array); + LiveEdit::PatchFunctionPositions(shared_array, position_change_array); + return isolate->heap()->undefined_value(); } @@ -13574,12 +13557,21 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditPatchFunctionPositions) { // checks that none of them have activations on stacks (of any thread). // Returns array of the same length with corresponding results of // LiveEdit::FunctionPatchabilityStatus type. -RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCheckAndDropActivations) { +RUNTIME_FUNCTION(Runtime_LiveEditCheckAndDropActivations) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_array, 0); CONVERT_BOOLEAN_ARG_CHECKED(do_drop, 1); + RUNTIME_ASSERT(shared_array->length()->IsSmi()); + int array_length = Smi::cast(shared_array->length())->value(); + for (int i = 0; i < array_length; i++) { + Handle<Object> element = + Object::GetElement(isolate, shared_array, i).ToHandleChecked(); + RUNTIME_ASSERT( + element->IsJSValue() && + Handle<JSValue>::cast(element)->value()->IsSharedFunctionInfo()); + } return *LiveEdit::CheckAndDropActivations(shared_array, do_drop); } @@ -13588,7 +13580,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCheckAndDropActivations) { // Compares 2 strings line-by-line, then token-wise and returns diff in form // of JSArray of triplets (pos1, pos1_end, pos2_end) describing list // of diff chunks. -RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCompareStrings) { +RUNTIME_FUNCTION(Runtime_LiveEditCompareStrings) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 2); @@ -13601,17 +13593,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCompareStrings) { // Restarts a call frame and completely drops all frames above. // Returns true if successful. 
Otherwise returns undefined or an error message. -RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditRestartFrame) { +RUNTIME_FUNCTION(Runtime_LiveEditRestartFrame) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 2); + CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]); + RUNTIME_ASSERT(CheckExecutionState(isolate, break_id)); - // Check arguments. - Object* check; - { MaybeObject* maybe_check = Runtime_CheckExecutionState( - RUNTIME_ARGUMENTS(isolate, args)); - if (!maybe_check->ToObject(&check)) return maybe_check; - } CONVERT_NUMBER_CHECKED(int, index, Int32, args[1]); Heap* heap = isolate->heap(); @@ -13640,7 +13628,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditRestartFrame) { // A testing entry. Returns statement position which is the closest to // source_position. -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionCodePositionFromSource) { +RUNTIME_FUNCTION(Runtime_GetFunctionCodePositionFromSource) { HandleScope scope(isolate); CHECK(isolate->debugger()->live_edit_enabled()); ASSERT(args.length() == 2); @@ -13678,43 +13666,37 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionCodePositionFromSource) { // Calls specified function with or without entering the debugger. // This is used in unit tests to run code as if debugger is entered or simply // to have a stack with C++ frame in the middle. -RUNTIME_FUNCTION(MaybeObject*, Runtime_ExecuteInDebugContext) { +RUNTIME_FUNCTION(Runtime_ExecuteInDebugContext) { HandleScope scope(isolate); ASSERT(args.length() == 2); CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); CONVERT_BOOLEAN_ARG_CHECKED(without_debugger, 1); - Handle<Object> result; - bool pending_exception; - { - if (without_debugger) { - result = Execution::Call(isolate, - function, - isolate->global_object(), - 0, - NULL, - &pending_exception); - } else { - EnterDebugger enter_debugger(isolate); - result = Execution::Call(isolate, - function, - isolate->global_object(), - 0, - NULL, - &pending_exception); - } - } - if (!pending_exception) { - return *result; + MaybeHandle<Object> maybe_result; + if (without_debugger) { + maybe_result = Execution::Call(isolate, + function, + isolate->global_object(), + 0, + NULL); } else { - return Failure::Exception(); + EnterDebugger enter_debugger(isolate); + maybe_result = Execution::Call(isolate, + function, + isolate->global_object(), + 0, + NULL); } + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result, maybe_result); + return *result; } // Sets a v8 flag. -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetFlags) { +RUNTIME_FUNCTION(Runtime_SetFlags) { SealHandleScope shs(isolate); + ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(String, arg, 0); SmartArrayPointer<char> flags = arg->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL); @@ -13725,16 +13707,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetFlags) { // Performs a GC. // Presently, it only does a full GC. -RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectGarbage) { +RUNTIME_FUNCTION(Runtime_CollectGarbage) { SealHandleScope shs(isolate); + ASSERT(args.length() == 1); isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, "%CollectGarbage"); return isolate->heap()->undefined_value(); } // Gets the current heap usage. 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_GetHeapUsage) { +RUNTIME_FUNCTION(Runtime_GetHeapUsage) { SealHandleScope shs(isolate); + ASSERT(args.length() == 0); int usage = static_cast<int>(isolate->heap()->SizeOfObjects()); if (!Smi::IsValid(usage)) { return *isolate->factory()->NewNumberFromInt(usage); @@ -13742,12 +13726,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetHeapUsage) { return Smi::FromInt(usage); } -#endif // ENABLE_DEBUGGER_SUPPORT - #ifdef V8_I18N_SUPPORT -RUNTIME_FUNCTION(MaybeObject*, Runtime_CanonicalizeLanguageTag) { +RUNTIME_FUNCTION(Runtime_CanonicalizeLanguageTag) { HandleScope scope(isolate); + Factory* factory = isolate->factory(); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(String, locale_id_str, 0); @@ -13764,7 +13747,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CanonicalizeLanguageTag) { uloc_forLanguageTag(*locale_id, icu_result, ULOC_FULLNAME_CAPACITY, &icu_length, &error); if (U_FAILURE(error) || icu_length == 0) { - return isolate->heap()->AllocateStringFromOneByte(CStrVector(kInvalidTag)); + return *factory->NewStringFromAsciiChecked(kInvalidTag); } char result[ULOC_FULLNAME_CAPACITY]; @@ -13773,15 +13756,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CanonicalizeLanguageTag) { uloc_toLanguageTag(icu_result, result, ULOC_FULLNAME_CAPACITY, TRUE, &error); if (U_FAILURE(error)) { - return isolate->heap()->AllocateStringFromOneByte(CStrVector(kInvalidTag)); + return *factory->NewStringFromAsciiChecked(kInvalidTag); } - return isolate->heap()->AllocateStringFromOneByte(CStrVector(result)); + return *factory->NewStringFromAsciiChecked(result); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_AvailableLocalesOf) { +RUNTIME_FUNCTION(Runtime_AvailableLocalesOf) { HandleScope scope(isolate); + Factory* factory = isolate->factory(); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(String, service, 0); @@ -13802,7 +13786,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_AvailableLocalesOf) { UErrorCode error = U_ZERO_ERROR; char result[ULOC_FULLNAME_CAPACITY]; Handle<JSObject> locales = - isolate->factory()->NewJSObject(isolate->object_function()); + factory->NewJSObject(isolate->object_function()); for (int32_t i = 0; i < count; ++i) { const char* icu_name = available_locales[i].getName(); @@ -13815,11 +13799,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_AvailableLocalesOf) { continue; } - RETURN_IF_EMPTY_HANDLE(isolate, + RETURN_FAILURE_ON_EXCEPTION(isolate, JSObject::SetLocalPropertyIgnoreAttributes( locales, - isolate->factory()->NewStringFromAscii(CStrVector(result)), - isolate->factory()->NewNumber(i), + factory->NewStringFromAsciiChecked(result), + factory->NewNumber(i), NONE)); } @@ -13827,8 +13811,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_AvailableLocalesOf) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDefaultICULocale) { - SealHandleScope shs(isolate); +RUNTIME_FUNCTION(Runtime_GetDefaultICULocale) { + HandleScope scope(isolate); + Factory* factory = isolate->factory(); ASSERT(args.length() == 0); @@ -13840,31 +13825,34 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDefaultICULocale) { uloc_toLanguageTag( default_locale.getName(), result, ULOC_FULLNAME_CAPACITY, FALSE, &status); if (U_SUCCESS(status)) { - return isolate->heap()->AllocateStringFromOneByte(CStrVector(result)); + return *factory->NewStringFromAsciiChecked(result); } - return isolate->heap()->AllocateStringFromOneByte(CStrVector("und")); + return *factory->NewStringFromStaticAscii("und"); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLanguageTagVariants) { 
+RUNTIME_FUNCTION(Runtime_GetLanguageTagVariants) { HandleScope scope(isolate); + Factory* factory = isolate->factory(); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSArray, input, 0); uint32_t length = static_cast<uint32_t>(input->length()->Number()); - Handle<FixedArray> output = isolate->factory()->NewFixedArray(length); - Handle<Name> maximized = - isolate->factory()->NewStringFromAscii(CStrVector("maximized")); - Handle<Name> base = - isolate->factory()->NewStringFromAscii(CStrVector("base")); + // Set some limit to prevent fuzz tests from going OOM. + // Can be bumped when callers' requirements change. + RUNTIME_ASSERT(length < 100); + Handle<FixedArray> output = factory->NewFixedArray(length); + Handle<Name> maximized = factory->NewStringFromStaticAscii("maximized"); + Handle<Name> base = factory->NewStringFromStaticAscii("base"); for (unsigned int i = 0; i < length; ++i) { - Handle<Object> locale_id = Object::GetElement(isolate, input, i); - RETURN_IF_EMPTY_HANDLE(isolate, locale_id); + Handle<Object> locale_id; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, locale_id, Object::GetElement(isolate, input, i)); if (!locale_id->IsString()) { - return isolate->Throw(isolate->heap()->illegal_argument_string()); + return isolate->Throw(*factory->illegal_argument_string()); } v8::String::Utf8Value utf8_locale_id( @@ -13879,7 +13867,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLanguageTagVariants) { uloc_forLanguageTag(*utf8_locale_id, icu_locale, ULOC_FULLNAME_CAPACITY, &icu_locale_length, &error); if (U_FAILURE(error) || icu_locale_length == 0) { - return isolate->Throw(isolate->heap()->illegal_argument_string()); + return isolate->Throw(*factory->illegal_argument_string()); } // Maximize the locale. @@ -13912,33 +13900,113 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLanguageTagVariants) { icu_base_locale, base_locale, ULOC_FULLNAME_CAPACITY, FALSE, &error); if (U_FAILURE(error)) { - return isolate->Throw(isolate->heap()->illegal_argument_string()); + return isolate->Throw(*factory->illegal_argument_string()); } - Handle<JSObject> result = - isolate->factory()->NewJSObject(isolate->object_function()); - RETURN_IF_EMPTY_HANDLE(isolate, + Handle<JSObject> result = factory->NewJSObject(isolate->object_function()); + RETURN_FAILURE_ON_EXCEPTION(isolate, JSObject::SetLocalPropertyIgnoreAttributes( result, maximized, - isolate->factory()->NewStringFromAscii(CStrVector(base_max_locale)), + factory->NewStringFromAsciiChecked(base_max_locale), NONE)); - RETURN_IF_EMPTY_HANDLE(isolate, + RETURN_FAILURE_ON_EXCEPTION(isolate, JSObject::SetLocalPropertyIgnoreAttributes( result, base, - isolate->factory()->NewStringFromAscii(CStrVector(base_locale)), + factory->NewStringFromAsciiChecked(base_locale), NONE)); output->set(i, *result); } - Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(output); + Handle<JSArray> result = factory->NewJSArrayWithElements(output); result->set_length(Smi::FromInt(length)); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateDateTimeFormat) { +RUNTIME_FUNCTION(Runtime_IsInitializedIntlObject) { + HandleScope scope(isolate); + + ASSERT(args.length() == 1); + + CONVERT_ARG_HANDLE_CHECKED(Object, input, 0); + + if (!input->IsJSObject()) return isolate->heap()->false_value(); + Handle<JSObject> obj = Handle<JSObject>::cast(input); + + Handle<String> marker = isolate->factory()->intl_initialized_marker_string(); + Handle<Object> tag(obj->GetHiddenProperty(marker), isolate); + return isolate->heap()->ToBoolean(!tag->IsTheHole()); +} + 
+ +RUNTIME_FUNCTION(Runtime_IsInitializedIntlObjectOfType) { + HandleScope scope(isolate); + + ASSERT(args.length() == 2); + + CONVERT_ARG_HANDLE_CHECKED(Object, input, 0); + CONVERT_ARG_HANDLE_CHECKED(String, expected_type, 1); + + if (!input->IsJSObject()) return isolate->heap()->false_value(); + Handle<JSObject> obj = Handle<JSObject>::cast(input); + + Handle<String> marker = isolate->factory()->intl_initialized_marker_string(); + Handle<Object> tag(obj->GetHiddenProperty(marker), isolate); + return isolate->heap()->ToBoolean( + tag->IsString() && String::cast(*tag)->Equals(*expected_type)); +} + + +RUNTIME_FUNCTION(Runtime_MarkAsInitializedIntlObjectOfType) { + HandleScope scope(isolate); + + ASSERT(args.length() == 3); + + CONVERT_ARG_HANDLE_CHECKED(JSObject, input, 0); + CONVERT_ARG_HANDLE_CHECKED(String, type, 1); + CONVERT_ARG_HANDLE_CHECKED(JSObject, impl, 2); + + Handle<String> marker = isolate->factory()->intl_initialized_marker_string(); + JSObject::SetHiddenProperty(input, marker, type); + + marker = isolate->factory()->intl_impl_object_string(); + JSObject::SetHiddenProperty(input, marker, impl); + + return isolate->heap()->undefined_value(); +} + + +RUNTIME_FUNCTION(Runtime_GetImplFromInitializedIntlObject) { + HandleScope scope(isolate); + + ASSERT(args.length() == 1); + + CONVERT_ARG_HANDLE_CHECKED(Object, input, 0); + + if (!input->IsJSObject()) { + Vector< Handle<Object> > arguments = HandleVector(&input, 1); + Handle<Object> type_error = + isolate->factory()->NewTypeError("not_intl_object", arguments); + return isolate->Throw(*type_error); + } + + Handle<JSObject> obj = Handle<JSObject>::cast(input); + + Handle<String> marker = isolate->factory()->intl_impl_object_string(); + Handle<Object> impl(obj->GetHiddenProperty(marker), isolate); + if (impl->IsTheHole()) { + Vector< Handle<Object> > arguments = HandleVector(&obj, 1); + Handle<Object> type_error = + isolate->factory()->NewTypeError("not_intl_object", arguments); + return isolate->Throw(*type_error); + } + return *impl; +} + + +RUNTIME_FUNCTION(Runtime_CreateDateTimeFormat) { HandleScope scope(isolate); ASSERT(args.length() == 3); @@ -13951,13 +14019,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateDateTimeFormat) { I18N::GetTemplate(isolate); // Create an empty object wrapper. - bool has_pending_exception = false; - Handle<JSObject> local_object = Execution::InstantiateObject( - date_format_template, &has_pending_exception); - if (has_pending_exception) { - ASSERT(isolate->has_pending_exception()); - return Failure::Exception(); - } + Handle<JSObject> local_object; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, local_object, + Execution::InstantiateObject(date_format_template)); // Set date time formatter as internal field of the resulting JS object. icu::SimpleDateFormat* date_format = DateFormat::InitializeDateTimeFormat( @@ -13967,11 +14032,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateDateTimeFormat) { local_object->SetInternalField(0, reinterpret_cast<Smi*>(date_format)); - RETURN_IF_EMPTY_HANDLE(isolate, + RETURN_FAILURE_ON_EXCEPTION(isolate, JSObject::SetLocalPropertyIgnoreAttributes( local_object, - isolate->factory()->NewStringFromAscii(CStrVector("dateFormat")), - isolate->factory()->NewStringFromAscii(CStrVector("valid")), + isolate->factory()->NewStringFromStaticAscii("dateFormat"), + isolate->factory()->NewStringFromStaticAscii("valid"), NONE)); // Make object handle weak so we can delete the data format once GC kicks in. 
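The new Intl runtime entries above add a small tagging protocol for wrapper objects: MarkAsInitializedIntlObjectOfType stores the type string and the implementation object in two hidden properties (keyed by the factory's intl_initialized_marker_string and intl_impl_object_string), the Is* queries read those slots back, and an untagged object yields the hole. A condensed sketch of the check, following the shape of Runtime_IsInitializedIntlObjectOfType; CheckIntlTag is an illustrative helper name, not part of the patch:

// True when |input| was tagged by MarkAsInitializedIntlObjectOfType
// with the expected type string.
static bool CheckIntlTag(Isolate* isolate,
                         Handle<Object> input,
                         Handle<String> expected_type) {
  if (!input->IsJSObject()) return false;
  Handle<JSObject> obj = Handle<JSObject>::cast(input);
  // Hidden properties keep the marker out of reach of script code.
  Handle<String> marker = isolate->factory()->intl_initialized_marker_string();
  Handle<Object> tag(obj->GetHiddenProperty(marker), isolate);
  // An untagged wrapper reads back the hole; a tagged one holds the type.
  return tag->IsString() && String::cast(*tag)->Equals(*expected_type);
}

The same i18n hunks also replace raw heap allocation of ASCII strings (heap()->AllocateStringFromOneByte(CStrVector(...))) with the factory helpers NewStringFromAsciiChecked and NewStringFromStaticAscii, which return handles and therefore compose with the RETURN_FAILURE_ON_EXCEPTION wrappers around the property writes.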
@@ -13983,7 +14048,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateDateTimeFormat) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateFormat) { +RUNTIME_FUNCTION(Runtime_InternalDateFormat) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -13991,13 +14056,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateFormat) { CONVERT_ARG_HANDLE_CHECKED(JSObject, date_format_holder, 0); CONVERT_ARG_HANDLE_CHECKED(JSDate, date, 1); - bool has_pending_exception = false; - Handle<Object> value = - Execution::ToNumber(isolate, date, &has_pending_exception); - if (has_pending_exception) { - ASSERT(isolate->has_pending_exception()); - return Failure::Exception(); - } + Handle<Object> value; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, value, Execution::ToNumber(isolate, date)); icu::SimpleDateFormat* date_format = DateFormat::UnpackDateFormat(isolate, date_format_holder); @@ -14006,14 +14067,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateFormat) { icu::UnicodeString result; date_format->format(value->Number(), result); - return *isolate->factory()->NewStringFromTwoByte( - Vector<const uint16_t>( - reinterpret_cast<const uint16_t*>(result.getBuffer()), - result.length())); + Handle<String> result_str; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result_str, + isolate->factory()->NewStringFromTwoByte( + Vector<const uint16_t>( + reinterpret_cast<const uint16_t*>(result.getBuffer()), + result.length()))); + return *result_str; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateParse) { +RUNTIME_FUNCTION(Runtime_InternalDateParse) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -14031,19 +14096,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateParse) { UDate date = date_format->parse(u_date, status); if (U_FAILURE(status)) return isolate->heap()->undefined_value(); - bool has_pending_exception = false; - Handle<JSDate> result = Handle<JSDate>::cast( - Execution::NewDate( - isolate, static_cast<double>(date), &has_pending_exception)); - if (has_pending_exception) { - ASSERT(isolate->has_pending_exception()); - return Failure::Exception(); - } + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Execution::NewDate(isolate, static_cast<double>(date))); + ASSERT(result->IsJSDate()); return *result; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateNumberFormat) { +RUNTIME_FUNCTION(Runtime_CreateNumberFormat) { HandleScope scope(isolate); ASSERT(args.length() == 3); @@ -14056,13 +14118,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateNumberFormat) { I18N::GetTemplate(isolate); // Create an empty object wrapper. - bool has_pending_exception = false; - Handle<JSObject> local_object = Execution::InstantiateObject( - number_format_template, &has_pending_exception); - if (has_pending_exception) { - ASSERT(isolate->has_pending_exception()); - return Failure::Exception(); - } + Handle<JSObject> local_object; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, local_object, + Execution::InstantiateObject(number_format_template)); // Set number formatter as internal field of the resulting JS object. 
icu::DecimalFormat* number_format = NumberFormat::InitializeNumberFormat( @@ -14072,11 +14131,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateNumberFormat) { local_object->SetInternalField(0, reinterpret_cast<Smi*>(number_format)); - RETURN_IF_EMPTY_HANDLE(isolate, + RETURN_FAILURE_ON_EXCEPTION(isolate, JSObject::SetLocalPropertyIgnoreAttributes( local_object, - isolate->factory()->NewStringFromAscii(CStrVector("numberFormat")), - isolate->factory()->NewStringFromAscii(CStrVector("valid")), + isolate->factory()->NewStringFromStaticAscii("numberFormat"), + isolate->factory()->NewStringFromStaticAscii("valid"), NONE)); Handle<Object> wrapper = isolate->global_handles()->Create(*local_object); @@ -14087,7 +14146,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateNumberFormat) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalNumberFormat) { +RUNTIME_FUNCTION(Runtime_InternalNumberFormat) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -14095,13 +14154,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalNumberFormat) { CONVERT_ARG_HANDLE_CHECKED(JSObject, number_format_holder, 0); CONVERT_ARG_HANDLE_CHECKED(Object, number, 1); - bool has_pending_exception = false; - Handle<Object> value = Execution::ToNumber( - isolate, number, &has_pending_exception); - if (has_pending_exception) { - ASSERT(isolate->has_pending_exception()); - return Failure::Exception(); - } + Handle<Object> value; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, value, Execution::ToNumber(isolate, number)); icu::DecimalFormat* number_format = NumberFormat::UnpackNumberFormat(isolate, number_format_holder); @@ -14110,14 +14165,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalNumberFormat) { icu::UnicodeString result; number_format->format(value->Number(), result); - return *isolate->factory()->NewStringFromTwoByte( - Vector<const uint16_t>( - reinterpret_cast<const uint16_t*>(result.getBuffer()), - result.length())); + Handle<String> result_str; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result_str, + isolate->factory()->NewStringFromTwoByte( + Vector<const uint16_t>( + reinterpret_cast<const uint16_t*>(result.getBuffer()), + result.length()))); + return *result_str; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalNumberParse) { +RUNTIME_FUNCTION(Runtime_InternalNumberParse) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -14156,7 +14215,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalNumberParse) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateCollator) { +RUNTIME_FUNCTION(Runtime_CreateCollator) { HandleScope scope(isolate); ASSERT(args.length() == 3); @@ -14168,13 +14227,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateCollator) { Handle<ObjectTemplateInfo> collator_template = I18N::GetTemplate(isolate); // Create an empty object wrapper. - bool has_pending_exception = false; - Handle<JSObject> local_object = Execution::InstantiateObject( - collator_template, &has_pending_exception); - if (has_pending_exception) { - ASSERT(isolate->has_pending_exception()); - return Failure::Exception(); - } + Handle<JSObject> local_object; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, local_object, Execution::InstantiateObject(collator_template)); // Set collator as internal field of the resulting JS object. 
icu::Collator* collator = Collator::InitializeCollator( @@ -14184,11 +14239,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateCollator) { local_object->SetInternalField(0, reinterpret_cast<Smi*>(collator)); - RETURN_IF_EMPTY_HANDLE(isolate, + RETURN_FAILURE_ON_EXCEPTION(isolate, JSObject::SetLocalPropertyIgnoreAttributes( local_object, - isolate->factory()->NewStringFromAscii(CStrVector("collator")), - isolate->factory()->NewStringFromAscii(CStrVector("valid")), + isolate->factory()->NewStringFromStaticAscii("collator"), + isolate->factory()->NewStringFromStaticAscii("valid"), NONE)); Handle<Object> wrapper = isolate->global_handles()->Create(*local_object); @@ -14199,7 +14254,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateCollator) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalCompare) { +RUNTIME_FUNCTION(Runtime_InternalCompare) { HandleScope scope(isolate); ASSERT(args.length() == 3); @@ -14227,7 +14282,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalCompare) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_StringNormalize) { +RUNTIME_FUNCTION(Runtime_StringNormalize) { HandleScope scope(isolate); static const UNormalizationMode normalizationForms[] = { UNORM_NFC, UNORM_NFD, UNORM_NFKC, UNORM_NFKD }; @@ -14236,6 +14291,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringNormalize) { CONVERT_ARG_HANDLE_CHECKED(String, stringValue, 0); CONVERT_NUMBER_CHECKED(int, form_id, Int32, args[1]); + RUNTIME_ASSERT(form_id >= 0 && + static_cast<size_t>(form_id) < ARRAY_SIZE(normalizationForms)); v8::String::Value string_value(v8::Utils::ToLocal(stringValue)); const UChar* u_value = reinterpret_cast<const UChar*>(*string_value); @@ -14249,14 +14306,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringNormalize) { return isolate->heap()->undefined_value(); } - return *isolate->factory()->NewStringFromTwoByte( - Vector<const uint16_t>( - reinterpret_cast<const uint16_t*>(result.getBuffer()), - result.length())); + Handle<String> result_str; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result_str, + isolate->factory()->NewStringFromTwoByte( + Vector<const uint16_t>( + reinterpret_cast<const uint16_t*>(result.getBuffer()), + result.length()))); + return *result_str; } -RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateBreakIterator) { +RUNTIME_FUNCTION(Runtime_CreateBreakIterator) { HandleScope scope(isolate); ASSERT(args.length() == 3); @@ -14269,13 +14330,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateBreakIterator) { I18N::GetTemplate2(isolate); // Create an empty object wrapper. - bool has_pending_exception = false; - Handle<JSObject> local_object = Execution::InstantiateObject( - break_iterator_template, &has_pending_exception); - if (has_pending_exception) { - ASSERT(isolate->has_pending_exception()); - return Failure::Exception(); - } + Handle<JSObject> local_object; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, local_object, + Execution::InstantiateObject(break_iterator_template)); // Set break iterator as internal field of the resulting JS object. icu::BreakIterator* break_iterator = BreakIterator::InitializeBreakIterator( @@ -14287,11 +14345,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateBreakIterator) { // Make sure that the pointer to adopted text is NULL. 
local_object->SetInternalField(1, reinterpret_cast<Smi*>(NULL)); - RETURN_IF_EMPTY_HANDLE(isolate, + RETURN_FAILURE_ON_EXCEPTION(isolate, JSObject::SetLocalPropertyIgnoreAttributes( local_object, - isolate->factory()->NewStringFromAscii(CStrVector("breakIterator")), - isolate->factory()->NewStringFromAscii(CStrVector("valid")), + isolate->factory()->NewStringFromStaticAscii("breakIterator"), + isolate->factory()->NewStringFromStaticAscii("valid"), NONE)); // Make object handle weak so we can delete the break iterator once GC kicks @@ -14304,7 +14362,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateBreakIterator) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_BreakIteratorAdoptText) { +RUNTIME_FUNCTION(Runtime_BreakIteratorAdoptText) { HandleScope scope(isolate); ASSERT(args.length() == 2); @@ -14331,7 +14389,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_BreakIteratorAdoptText) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_BreakIteratorFirst) { +RUNTIME_FUNCTION(Runtime_BreakIteratorFirst) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -14346,7 +14404,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_BreakIteratorFirst) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_BreakIteratorNext) { +RUNTIME_FUNCTION(Runtime_BreakIteratorNext) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -14361,7 +14419,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_BreakIteratorNext) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_BreakIteratorCurrent) { +RUNTIME_FUNCTION(Runtime_BreakIteratorCurrent) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -14376,7 +14434,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_BreakIteratorCurrent) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_BreakIteratorBreakType) { +RUNTIME_FUNCTION(Runtime_BreakIteratorBreakType) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -14393,17 +14451,17 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_BreakIteratorBreakType) { int32_t status = rule_based_iterator->getRuleStatus(); // Keep return values in sync with JavaScript BreakType enum. if (status >= UBRK_WORD_NONE && status < UBRK_WORD_NONE_LIMIT) { - return *isolate->factory()->NewStringFromAscii(CStrVector("none")); + return *isolate->factory()->NewStringFromStaticAscii("none"); } else if (status >= UBRK_WORD_NUMBER && status < UBRK_WORD_NUMBER_LIMIT) { - return *isolate->factory()->NewStringFromAscii(CStrVector("number")); + return *isolate->factory()->NewStringFromStaticAscii("number"); } else if (status >= UBRK_WORD_LETTER && status < UBRK_WORD_LETTER_LIMIT) { - return *isolate->factory()->NewStringFromAscii(CStrVector("letter")); + return *isolate->factory()->NewStringFromStaticAscii("letter"); } else if (status >= UBRK_WORD_KANA && status < UBRK_WORD_KANA_LIMIT) { - return *isolate->factory()->NewStringFromAscii(CStrVector("kana")); + return *isolate->factory()->NewStringFromStaticAscii("kana"); } else if (status >= UBRK_WORD_IDEO && status < UBRK_WORD_IDEO_LIMIT) { - return *isolate->factory()->NewStringFromAscii(CStrVector("ideo")); + return *isolate->factory()->NewStringFromStaticAscii("ideo"); } else { - return *isolate->factory()->NewStringFromAscii(CStrVector("unknown")); + return *isolate->factory()->NewStringFromStaticAscii("unknown"); } } #endif // V8_I18N_SUPPORT @@ -14441,14 +14499,14 @@ static Handle<Object> Runtime_GetScriptFromScriptName( if (script.is_null()) return factory->undefined_value(); // Return the script found. - return GetScriptWrapper(script); + return Script::GetWrapper(script); } // Get the script object from script data. 
NOTE: Regarding performance // see the NOTE for GetScriptFromScriptData. // args[0]: script data for the script to find the source for -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScript) { +RUNTIME_FUNCTION(Runtime_GetScript) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -14465,11 +14523,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScript) { // Collect the raw data for a stack trace. Returns an array of 4 // element segments each containing a receiver, function, code and // native code offset. -RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectStackTrace) { +RUNTIME_FUNCTION(Runtime_CollectStackTrace) { HandleScope scope(isolate); - ASSERT_EQ(args.length(), 3); + ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(JSObject, error_object, 0); - Handle<Object> caller = args.at<Object>(1); + CONVERT_ARG_HANDLE_CHECKED(Object, caller, 1); CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[2]); // Optionally capture a more detailed stack trace for the message. @@ -14481,12 +14539,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectStackTrace) { // Retrieve the stack trace. This is the raw stack trace that yet has to // be formatted. Since we only need this once, clear it afterwards. -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetAndClearOverflowedStackTrace) { +RUNTIME_FUNCTION(Runtime_GetAndClearOverflowedStackTrace) { HandleScope scope(isolate); - ASSERT_EQ(args.length(), 1); + ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSObject, error_object, 0); Handle<String> key = isolate->factory()->hidden_stack_trace_string(); - Handle<Object> result(error_object->GetHiddenProperty(*key), isolate); + Handle<Object> result(error_object->GetHiddenProperty(key), isolate); if (result->IsTheHole()) return isolate->heap()->undefined_value(); RUNTIME_ASSERT(result->IsJSArray() || result->IsUndefined()); JSObject::DeleteHiddenProperty(error_object, key); @@ -14495,18 +14553,17 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetAndClearOverflowedStackTrace) { // Returns V8 version as a string. 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_GetV8Version) { - SealHandleScope shs(isolate); - ASSERT_EQ(args.length(), 0); +RUNTIME_FUNCTION(Runtime_GetV8Version) { + HandleScope scope(isolate); + ASSERT(args.length() == 0); const char* version_string = v8::V8::GetVersion(); - return isolate->heap()->AllocateStringFromOneByte(CStrVector(version_string), - NOT_TENURED); + return *isolate->factory()->NewStringFromAsciiChecked(version_string); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_Abort) { +RUNTIME_FUNCTION(Runtime_Abort) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_SMI_ARG_CHECKED(message_id, 0); @@ -14520,7 +14577,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Abort) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_AbortJS) { +RUNTIME_FUNCTION(Runtime_AbortJS) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(String, message, 0); @@ -14532,16 +14589,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_AbortJS) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FlattenString) { +RUNTIME_FUNCTION(Runtime_FlattenString) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(String, str, 0); - FlattenString(str); - return isolate->heap()->undefined_value(); + return *String::Flatten(str); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyContextDisposed) { +RUNTIME_FUNCTION(Runtime_NotifyContextDisposed) { HandleScope scope(isolate); ASSERT(args.length() == 0); isolate->heap()->NotifyContextDisposed(); @@ -14549,7 +14605,25 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyContextDisposed) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_TryMigrateInstance) { +RUNTIME_FUNCTION(Runtime_LoadMutableDouble) { + HandleScope scope(isolate); + ASSERT(args.length() == 2); + CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); + CONVERT_ARG_HANDLE_CHECKED(Smi, index, 1); + int idx = index->value() >> 1; + int inobject_properties = object->map()->inobject_properties(); + if (idx < 0) { + idx = -idx + inobject_properties - 1; + } + int max_idx = object->properties()->length() + inobject_properties; + RUNTIME_ASSERT(idx < max_idx); + Handle<Object> raw_value(object->RawFastPropertyAt(idx), isolate); + RUNTIME_ASSERT(raw_value->IsNumber() || raw_value->IsUninitialized()); + return *Object::NewStorageFor(isolate, raw_value, Representation::Double()); +} + + +RUNTIME_FUNCTION(Runtime_TryMigrateInstance) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(Object, object, 0); @@ -14560,43 +14634,46 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TryMigrateInstance) { // code where we can't handle lazy deopts for lack of a suitable bailout // ID. So we just try migration and signal failure if necessary, // which will also trigger a deopt. - Handle<Object> result = JSObject::TryMigrateInstance(js_object); - if (result.is_null()) return Smi::FromInt(0); + if (!JSObject::TryMigrateInstance(js_object)) return Smi::FromInt(0); return *object; } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_GetFromCache) { +RUNTIME_FUNCTION(RuntimeHidden_GetFromCache) { SealHandleScope shs(isolate); // This is only called from codegen, so checks might be more lax. CONVERT_ARG_CHECKED(JSFunctionResultCache, cache, 0); - Object* key = args[1]; + CONVERT_ARG_CHECKED(Object, key, 1); - int finger_index = cache->finger_index(); - Object* o = cache->get(finger_index); - if (o == key) { - // The fastest case: hit the same place again. 
- return cache->get(finger_index + 1); - } + { + DisallowHeapAllocation no_alloc; - for (int i = finger_index - 2; - i >= JSFunctionResultCache::kEntriesIndex; - i -= 2) { - o = cache->get(i); + int finger_index = cache->finger_index(); + Object* o = cache->get(finger_index); if (o == key) { - cache->set_finger_index(i); - return cache->get(i + 1); + // The fastest case: hit the same place again. + return cache->get(finger_index + 1); } - } - int size = cache->size(); - ASSERT(size <= cache->length()); + for (int i = finger_index - 2; + i >= JSFunctionResultCache::kEntriesIndex; + i -= 2) { + o = cache->get(i); + if (o == key) { + cache->set_finger_index(i); + return cache->get(i + 1); + } + } - for (int i = size - 2; i > finger_index; i -= 2) { - o = cache->get(i); - if (o == key) { - cache->set_finger_index(i); - return cache->get(i + 1); + int size = cache->size(); + ASSERT(size <= cache->length()); + + for (int i = size - 2; i > finger_index; i -= 2) { + o = cache->get(i); + if (o == key) { + cache->set_finger_index(i); + return cache->get(i + 1); + } } } @@ -14614,14 +14691,9 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_GetFromCache) { isolate); // This handle is nor shared, nor used later, so it's safe. Handle<Object> argv[] = { key_handle }; - bool pending_exception; - value = Execution::Call(isolate, - factory, - receiver, - ARRAY_SIZE(argv), - argv, - &pending_exception); - if (pending_exception) return Failure::Exception(); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, value, + Execution::Call(isolate, factory, receiver, ARRAY_SIZE(argv), argv)); } #ifdef VERIFY_HEAP @@ -14631,8 +14703,8 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_GetFromCache) { #endif // Function invocation may have cleared the cache. Reread all the data. - finger_index = cache_handle->finger_index(); - size = cache_handle->size(); + int finger_index = cache_handle->finger_index(); + int size = cache_handle->size(); // If we have spare room, put new data into it, otherwise evict post finger // entry which is likely to be the least recently used. @@ -14665,15 +14737,17 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_GetFromCache) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetStartPosition) { +RUNTIME_FUNCTION(Runtime_MessageGetStartPosition) { SealHandleScope shs(isolate); + ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSMessageObject, message, 0); return Smi::FromInt(message->start_position()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetScript) { +RUNTIME_FUNCTION(Runtime_MessageGetScript) { SealHandleScope shs(isolate); + ASSERT(args.length() == 1); CONVERT_ARG_CHECKED(JSMessageObject, message, 0); return message->script(); } @@ -14682,7 +14756,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetScript) { #ifdef DEBUG // ListNatives is ONLY used by the fuzz-natives.js in debug mode // Exclude the code in release mode. -RUNTIME_FUNCTION(MaybeObject*, Runtime_ListNatives) { +RUNTIME_FUNCTION(Runtime_ListNatives) { HandleScope scope(isolate); ASSERT(args.length() == 0); #define COUNT_ENTRY(Name, argc, ressize) + 1 @@ -14701,11 +14775,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ListNatives) { Handle<String> name; \ /* Inline runtime functions have an underscore in front of the name. 
*/ \ if (inline_runtime_functions) { \ - name = factory->NewStringFromAscii( \ - Vector<const char>("_" #Name, StrLength("_" #Name))); \ + name = factory->NewStringFromStaticAscii("_" #Name); \ } else { \ - name = factory->NewStringFromAscii( \ - Vector<const char>(#Name, StrLength(#Name))); \ + name = factory->NewStringFromStaticAscii(#Name); \ } \ Handle<FixedArray> pair_elements = factory->NewFixedArray(2); \ pair_elements->set(0, *name); \ @@ -14727,28 +14799,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ListNatives) { #endif -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_Log) { - HandleScope handle_scope(isolate); - ASSERT(args.length() == 2); - CONVERT_ARG_HANDLE_CHECKED(String, format, 0); - CONVERT_ARG_HANDLE_CHECKED(JSArray, elms, 1); - - SmartArrayPointer<char> format_chars = format->ToCString(); - isolate->logger()->LogRuntime( - Vector<const char>(format_chars.get(), format->length()), elms); - return isolate->heap()->undefined_value(); -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_IS_VAR) { +RUNTIME_FUNCTION(Runtime_IS_VAR) { UNREACHABLE(); // implemented as macro in the parser return NULL; } #define ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(Name) \ - RUNTIME_FUNCTION(MaybeObject*, Runtime_Has##Name) { \ - CONVERT_ARG_CHECKED(JSObject, obj, 0); \ + RUNTIME_FUNCTION(Runtime_Has##Name) { \ + CONVERT_ARG_CHECKED(JSObject, obj, 0); \ return isolate->heap()->ToBoolean(obj->Has##Name()); \ } @@ -14767,7 +14826,7 @@ ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastProperties) #define TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION(Type, type, TYPE, ctype, size) \ - RUNTIME_FUNCTION(MaybeObject*, Runtime_HasExternal##Type##Elements) { \ + RUNTIME_FUNCTION(Runtime_HasExternal##Type##Elements) { \ CONVERT_ARG_CHECKED(JSObject, obj, 0); \ return isolate->heap()->ToBoolean(obj->HasExternal##Type##Elements()); \ } @@ -14778,7 +14837,7 @@ TYPED_ARRAYS(TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION) #define FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION(Type, type, TYPE, ctype, s) \ - RUNTIME_FUNCTION(MaybeObject*, Runtime_HasFixed##Type##Elements) { \ + RUNTIME_FUNCTION(Runtime_HasFixed##Type##Elements) { \ CONVERT_ARG_CHECKED(JSObject, obj, 0); \ return isolate->heap()->ToBoolean(obj->HasFixed##Type##Elements()); \ } @@ -14788,7 +14847,7 @@ TYPED_ARRAYS(FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION) #undef FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION -RUNTIME_FUNCTION(MaybeObject*, Runtime_HaveSameMap) { +RUNTIME_FUNCTION(Runtime_HaveSameMap) { SealHandleScope shs(isolate); ASSERT(args.length() == 2); CONVERT_ARG_CHECKED(JSObject, obj1, 0); @@ -14797,52 +14856,40 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_HaveSameMap) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsAccessCheckNeeded) { +RUNTIME_FUNCTION(Runtime_IsJSGlobalProxy) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(HeapObject, obj, 0); - return isolate->heap()->ToBoolean(obj->IsAccessCheckNeeded()); + CONVERT_ARG_CHECKED(Object, obj, 0); + return isolate->heap()->ToBoolean(obj->IsJSGlobalProxy()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsObserved) { +RUNTIME_FUNCTION(Runtime_IsObserved) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); if (!args[0]->IsJSReceiver()) return isolate->heap()->false_value(); - JSReceiver* obj = JSReceiver::cast(args[0]); - if (obj->IsJSGlobalProxy()) { - Object* proto = obj->GetPrototype(); - if (proto->IsNull()) return isolate->heap()->false_value(); - ASSERT(proto->IsJSGlobalObject()); - obj = JSReceiver::cast(proto); - } + CONVERT_ARG_CHECKED(JSReceiver, obj, 0); + ASSERT(!obj->IsJSGlobalProxy() || 
!obj->map()->is_observed()); return isolate->heap()->ToBoolean(obj->map()->is_observed()); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetIsObserved) { +RUNTIME_FUNCTION(Runtime_SetIsObserved) { HandleScope scope(isolate); ASSERT(args.length() == 1); CONVERT_ARG_HANDLE_CHECKED(JSReceiver, obj, 0); - if (obj->IsJSGlobalProxy()) { - Object* proto = obj->GetPrototype(); - if (proto->IsNull()) return isolate->heap()->undefined_value(); - ASSERT(proto->IsJSGlobalObject()); - obj = handle(JSReceiver::cast(proto)); - } + ASSERT(!obj->IsJSGlobalProxy()); if (obj->IsJSProxy()) return isolate->heap()->undefined_value(); - ASSERT(!(obj->map()->is_observed() && obj->IsJSObject() && - Handle<JSObject>::cast(obj)->HasFastElements())); ASSERT(obj->IsJSObject()); JSObject::SetObserved(Handle<JSObject>::cast(obj)); return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetMicrotaskPending) { +RUNTIME_FUNCTION(Runtime_SetMicrotaskPending) { SealHandleScope shs(isolate); ASSERT(args.length() == 1); CONVERT_BOOLEAN_ARG_CHECKED(new_state, 0); @@ -14852,30 +14899,29 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetMicrotaskPending) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_RunMicrotasks) { +RUNTIME_FUNCTION(Runtime_RunMicrotasks) { HandleScope scope(isolate); ASSERT(args.length() == 0); - if (isolate->microtask_pending()) - Execution::RunMicrotasks(isolate); + if (isolate->microtask_pending()) Execution::RunMicrotasks(isolate); return isolate->heap()->undefined_value(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetMicrotaskState) { +RUNTIME_FUNCTION(Runtime_GetMicrotaskState) { SealHandleScope shs(isolate); ASSERT(args.length() == 0); return isolate->heap()->microtask_state(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_GetObservationState) { +RUNTIME_FUNCTION(Runtime_GetObservationState) { SealHandleScope shs(isolate); ASSERT(args.length() == 0); return isolate->heap()->observation_state(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_ObservationWeakMapCreate) { +RUNTIME_FUNCTION(Runtime_ObservationWeakMapCreate) { HandleScope scope(isolate); ASSERT(args.length() == 0); // TODO(adamk): Currently this runtime function is only called three times per @@ -14885,53 +14931,108 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ObservationWeakMapCreate) { isolate->factory()->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize); Handle<JSWeakMap> weakmap = Handle<JSWeakMap>::cast(isolate->factory()->NewJSObjectFromMap(map)); - return WeakCollectionInitialize(isolate, weakmap); + return *WeakCollectionInitialize(isolate, weakmap); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_UnwrapGlobalProxy) { - SealHandleScope shs(isolate); - ASSERT(args.length() == 1); - Object* object = args[0]; - if (object->IsJSGlobalProxy()) { - object = object->GetPrototype(isolate); - if (object->IsNull()) return isolate->heap()->undefined_value(); - } - return object; +static bool ContextsHaveSameOrigin(Handle<Context> context1, + Handle<Context> context2) { + return context1->security_token() == context2->security_token(); } -RUNTIME_FUNCTION(MaybeObject*, Runtime_IsAccessAllowedForObserver) { +RUNTIME_FUNCTION(Runtime_ObserverObjectAndRecordHaveSameOrigin) { HandleScope scope(isolate); ASSERT(args.length() == 3); CONVERT_ARG_HANDLE_CHECKED(JSFunction, observer, 0); CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 1); - ASSERT(object->map()->is_access_check_needed()); - Handle<Object> key = args.at<Object>(2); - SaveContext save(isolate); - isolate->set_context(observer->context()); - if (!isolate->MayNamedAccessWrapper(object, - 
isolate->factory()->undefined_value(), - v8::ACCESS_KEYS)) { - return isolate->heap()->false_value(); - } - bool access_allowed = false; - uint32_t index = 0; - if (key->ToArrayIndex(&index) || - (key->IsString() && String::cast(*key)->AsArrayIndex(&index))) { - access_allowed = - isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_GET) && - isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_HAS); - } else { - access_allowed = - isolate->MayNamedAccessWrapper(object, key, v8::ACCESS_GET) && - isolate->MayNamedAccessWrapper(object, key, v8::ACCESS_HAS); - } - return isolate->heap()->ToBoolean(access_allowed); + CONVERT_ARG_HANDLE_CHECKED(JSObject, record, 2); + + Handle<Context> observer_context(observer->context()->native_context(), + isolate); + Handle<Context> object_context(object->GetCreationContext()); + Handle<Context> record_context(record->GetCreationContext()); + + return isolate->heap()->ToBoolean( + ContextsHaveSameOrigin(object_context, observer_context) && + ContextsHaveSameOrigin(object_context, record_context)); +} + + +RUNTIME_FUNCTION(Runtime_ObjectWasCreatedInCurrentOrigin) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); + + Handle<Context> creation_context(object->GetCreationContext(), isolate); + return isolate->heap()->ToBoolean( + ContextsHaveSameOrigin(creation_context, isolate->native_context())); +} + + +RUNTIME_FUNCTION(Runtime_ObjectObserveInObjectContext) { + HandleScope scope(isolate); + ASSERT(args.length() == 3); + CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, callback, 1); + CONVERT_ARG_HANDLE_CHECKED(Object, accept, 2); + RUNTIME_ASSERT(accept->IsUndefined() || accept->IsJSObject()); + + Handle<Context> context(object->GetCreationContext(), isolate); + Handle<JSFunction> function(context->native_object_observe(), isolate); + Handle<Object> call_args[] = { object, callback, accept }; + Handle<Object> result; + + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Execution::Call(isolate, function, + handle(context->object_function(), isolate), + ARRAY_SIZE(call_args), call_args, true)); + return *result; +} + + +RUNTIME_FUNCTION(Runtime_ObjectGetNotifierInObjectContext) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0); + + Handle<Context> context(object->GetCreationContext(), isolate); + Handle<JSFunction> function(context->native_object_get_notifier(), isolate); + Handle<Object> call_args[] = { object }; + Handle<Object> result; + + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Execution::Call(isolate, function, + handle(context->object_function(), isolate), + ARRAY_SIZE(call_args), call_args, true)); + return *result; +} + + +RUNTIME_FUNCTION(Runtime_ObjectNotifierPerformChangeInObjectContext) { + HandleScope scope(isolate); + ASSERT(args.length() == 3); + CONVERT_ARG_HANDLE_CHECKED(JSObject, object_info, 0); + CONVERT_ARG_HANDLE_CHECKED(String, change_type, 1); + CONVERT_ARG_HANDLE_CHECKED(JSFunction, change_fn, 2); + + Handle<Context> context(object_info->GetCreationContext(), isolate); + Handle<JSFunction> function(context->native_object_notifier_perform_change(), + isolate); + Handle<Object> call_args[] = { object_info, change_type, change_fn }; + Handle<Object> result; + + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + Execution::Call(isolate, function, isolate->factory()->undefined_value(), + ARRAY_SIZE(call_args), call_args, true)); + 
return *result; } -static MaybeObject* ArrayConstructorCommon(Isolate* isolate, +static Object* ArrayConstructorCommon(Isolate* isolate, Handle<JSFunction> constructor, Handle<AllocationSite> site, Arguments* caller_args) { @@ -14970,7 +15071,6 @@ static MaybeObject* ArrayConstructorCommon(Isolate* isolate, Handle<Map> initial_map(constructor->initial_map(), isolate); if (to_kind != initial_map->elements_kind()) { initial_map = Map::AsElementsKind(initial_map, to_kind); - RETURN_IF_EMPTY_HANDLE(isolate, initial_map); } // If we don't care to track arrays of to_kind ElementsKind, then @@ -14996,8 +15096,8 @@ static MaybeObject* ArrayConstructorCommon(Isolate* isolate, factory->NewJSArrayStorage(array, 0, 0, DONT_INITIALIZE_ARRAY_ELEMENTS); ElementsKind old_kind = array->GetElementsKind(); - RETURN_IF_EMPTY_HANDLE(isolate, - ArrayConstructInitializeElements(array, caller_args)); + RETURN_FAILURE_ON_EXCEPTION( + isolate, ArrayConstructInitializeElements(array, caller_args)); if (!site.is_null() && (old_kind != array->GetElementsKind() || !can_use_type_feedback)) { @@ -15010,7 +15110,7 @@ static MaybeObject* ArrayConstructorCommon(Isolate* isolate, } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ArrayConstructor) { +RUNTIME_FUNCTION(RuntimeHidden_ArrayConstructor) { HandleScope scope(isolate); // If we get 2 arguments then they are the stub parameters (constructor, type // info). If we get 4, then the first one is a pointer to the arguments @@ -15047,7 +15147,7 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_ArrayConstructor) { } -RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InternalArrayConstructor) { +RUNTIME_FUNCTION(RuntimeHidden_InternalArrayConstructor) { HandleScope scope(isolate); Arguments empty_args(0, NULL); bool no_caller_args = args.length() == 1; @@ -15070,7 +15170,8 @@ RUNTIME_FUNCTION(MaybeObject*, RuntimeHidden_InternalArrayConstructor) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_MaxSmi) { +RUNTIME_FUNCTION(Runtime_MaxSmi) { + ASSERT(args.length() == 0); return Smi::FromInt(Smi::kMaxValue); } @@ -15111,37 +15212,27 @@ static const Runtime::Function kIntrinsicFunctions[] = { #undef F -MaybeObject* Runtime::InitializeIntrinsicFunctionNames(Heap* heap, - Object* dictionary) { - ASSERT(dictionary != NULL); - ASSERT(NameDictionary::cast(dictionary)->NumberOfElements() == 0); +void Runtime::InitializeIntrinsicFunctionNames(Isolate* isolate, + Handle<NameDictionary> dict) { + ASSERT(dict->NumberOfElements() == 0); + HandleScope scope(isolate); for (int i = 0; i < kNumFunctions; ++i) { const char* name = kIntrinsicFunctions[i].name; if (name == NULL) continue; - Object* name_string; - { MaybeObject* maybe_name_string = - heap->InternalizeUtf8String(name); - if (!maybe_name_string->ToObject(&name_string)) return maybe_name_string; - } - NameDictionary* name_dictionary = NameDictionary::cast(dictionary); - { MaybeObject* maybe_dictionary = name_dictionary->Add( - String::cast(name_string), - Smi::FromInt(i), - PropertyDetails(NONE, NORMAL, Representation::None())); - if (!maybe_dictionary->ToObject(&dictionary)) { - // Non-recoverable failure. Calling code must restart heap - // initialization. - return maybe_dictionary; - } - } + Handle<NameDictionary> new_dict = NameDictionary::Add( + dict, + isolate->factory()->InternalizeUtf8String(name), + Handle<Smi>(Smi::FromInt(i), isolate), + PropertyDetails(NONE, NORMAL, Representation::None())); + // The dictionary does not need to grow. 
+ CHECK(new_dict.is_identical_to(dict)); } - return dictionary; } const Runtime::Function* Runtime::FunctionForName(Handle<String> name) { Heap* heap = name->GetHeap(); - int entry = heap->intrinsic_function_names()->FindEntry(*name); + int entry = heap->intrinsic_function_names()->FindEntry(name); if (entry != kNotFound) { Object* smi_index = heap->intrinsic_function_names()->ValueAt(entry); int function_index = Smi::cast(smi_index)->value(); @@ -15155,31 +15246,4 @@ const Runtime::Function* Runtime::FunctionForId(Runtime::FunctionId id) { return &(kIntrinsicFunctions[static_cast<int>(id)]); } - -void Runtime::PerformGC(Object* result, Isolate* isolate) { - Failure* failure = Failure::cast(result); - if (failure->IsRetryAfterGC()) { - if (isolate->heap()->new_space()->AddFreshPage()) { - return; - } - - // Try to do a garbage collection; ignore it if it fails. The C - // entry stub will throw an out-of-memory exception in that case. - isolate->heap()->CollectGarbage(failure->allocation_space(), - "Runtime::PerformGC"); - } else { - // Handle last resort GC and make sure to allow future allocations - // to grow the heap without causing GCs (if possible). - isolate->counters()->gc_last_resort_from_js()->Increment(); - isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, - "Runtime::PerformGC"); - } -} - - -void Runtime::OutOfMemory() { - Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); - UNREACHABLE(); -} - } } // namespace v8::internal diff --git a/deps/v8/src/runtime.h b/deps/v8/src/runtime.h index 58cd5259c..38d2126f0 100644 --- a/deps/v8/src/runtime.h +++ b/deps/v8/src/runtime.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_RUNTIME_H_ #define V8_RUNTIME_H_ @@ -97,9 +74,11 @@ namespace internal { F(SetNativeFlag, 1, 1) \ F(SetInlineBuiltinFlag, 1, 1) \ F(StoreArrayLiteralElement, 5, 1) \ - F(DebugCallbackSupportsStepping, 1, 1) \ F(DebugPrepareStepInIfStepping, 1, 1) \ + F(DebugPromiseHandlePrologue, 1, 1) \ + F(DebugPromiseHandleEpilogue, 0, 1) \ F(FlattenString, 1, 1) \ + F(LoadMutableDouble, 2, 1) \ F(TryMigrateInstance, 1, 1) \ F(NotifyContextDisposed, 0, 1) \ \ @@ -133,7 +112,6 @@ namespace internal { F(NumberDiv, 2, 1) \ F(NumberMod, 2, 1) \ F(NumberUnaryMinus, 1, 1) \ - F(NumberAlloc, 0, 1) \ F(NumberImul, 2, 1) \ \ F(StringBuilderConcat, 3, 1) \ @@ -157,18 +135,14 @@ namespace internal { F(SmiLexicographicCompare, 2, 1) \ \ /* Math */ \ - F(Math_acos, 1, 1) \ - F(Math_asin, 1, 1) \ - F(Math_atan, 1, 1) \ - F(Math_log, 1, 1) \ - F(Math_sqrt, 1, 1) \ - F(Math_exp, 1, 1) \ - F(Math_floor, 1, 1) \ - F(Math_pow, 2, 1) \ - F(Math_pow_cfunction, 2, 1) \ - F(Math_atan2, 2, 1) \ + F(MathAcos, 1, 1) \ + F(MathAsin, 1, 1) \ + F(MathAtan, 1, 1) \ + F(MathFloor, 1, 1) \ + F(MathAtan2, 2, 1) \ + F(MathExp, 1, 1) \ F(RoundNumber, 1, 1) \ - F(Math_fround, 1, 1) \ + F(MathFround, 1, 1) \ \ /* Regular expressions */ \ F(RegExpCompile, 3, 1) \ @@ -229,7 +203,7 @@ namespace internal { F(SetCode, 2, 1) \ F(SetExpectedNumberOfProperties, 2, 1) \ \ - F(CreateApiFunction, 1, 1) \ + F(CreateApiFunction, 2, 1) \ F(IsTemplate, 1, 1) \ F(GetTemplateField, 2, 1) \ F(DisableAccessChecks, 1, 1) \ @@ -301,15 +275,25 @@ namespace internal { F(SetAdd, 2, 1) \ F(SetHas, 2, 1) \ F(SetDelete, 2, 1) \ + F(SetClear, 1, 1) \ F(SetGetSize, 1, 1) \ + F(SetCreateIterator, 2, 1) \ + \ + F(SetIteratorNext, 1, 1) \ + F(SetIteratorClose, 1, 1) \ \ /* Harmony maps */ \ F(MapInitialize, 1, 1) \ F(MapGet, 2, 1) \ F(MapHas, 2, 1) \ F(MapDelete, 2, 1) \ + F(MapClear, 1, 1) \ F(MapSet, 3, 1) \ F(MapGetSize, 1, 1) \ + F(MapCreateIterator, 2, 1) \ + \ + F(MapIteratorNext, 1, 1) \ + F(MapIteratorClose, 1, 1) \ \ /* Harmony weak maps and sets */ \ F(WeakCollectionInitialize, 1, 1) \ @@ -327,26 +311,23 @@ namespace internal { F(SetIsObserved, 1, 1) \ F(GetObservationState, 0, 1) \ F(ObservationWeakMapCreate, 0, 1) \ - F(UnwrapGlobalProxy, 1, 1) \ - F(IsAccessAllowedForObserver, 3, 1) \ + F(ObserverObjectAndRecordHaveSameOrigin, 3, 1) \ + F(ObjectWasCreatedInCurrentOrigin, 1, 1) \ + F(ObjectObserveInObjectContext, 3, 1) \ + F(ObjectGetNotifierInObjectContext, 1, 1) \ + F(ObjectNotifierPerformChangeInObjectContext, 3, 1) \ \ /* Harmony typed arrays */ \ F(ArrayBufferInitialize, 2, 1)\ - F(ArrayBufferGetByteLength, 1, 1)\ F(ArrayBufferSliceImpl, 3, 1) \ F(ArrayBufferIsView, 1, 1) \ F(ArrayBufferNeuter, 1, 1) \ \ F(TypedArrayInitializeFromArrayLike, 4, 1) \ F(TypedArrayGetBuffer, 1, 1) \ - F(TypedArrayGetByteLength, 1, 1) \ - F(TypedArrayGetByteOffset, 1, 1) \ - F(TypedArrayGetLength, 1, 1) \ F(TypedArraySetFastCases, 3, 1) \ \ F(DataViewGetBuffer, 1, 1) \ - F(DataViewGetByteLength, 1, 1) \ - F(DataViewGetByteOffset, 1, 1) \ F(DataViewGetInt8, 3, 1) \ F(DataViewGetUint8, 3, 1) \ F(DataViewGetInt16, 3, 1) \ @@ -420,11 +401,10 @@ namespace internal { F(HasFastProperties, 1, 1) \ F(TransitionElementsKind, 2, 1) \ F(HaveSameMap, 2, 1) \ - F(IsAccessCheckNeeded, 1, 1) + F(IsJSGlobalProxy, 1, 1) -#ifdef ENABLE_DEBUGGER_SUPPORT -#define RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F) \ +#define RUNTIME_FUNCTION_LIST_DEBUGGER(F) \ /* Debugger support*/ \ F(DebugBreak, 0, 1) \ F(SetDebugEventListener, 2, 1) \ @@ -487,10 +467,6 @@ namespace internal { 
F(CollectGarbage, 1, 1) \ F(GetHeapUsage, 0, 1) \ -#else -#define RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F) -#endif - #ifdef V8_I18N_SUPPORT #define RUNTIME_FUNCTION_LIST_I18N_SUPPORT(F) \ @@ -500,6 +476,10 @@ namespace internal { F(AvailableLocalesOf, 1, 1) \ F(GetDefaultICULocale, 0, 1) \ F(GetLanguageTagVariants, 1, 1) \ + F(IsInitializedIntlObject, 1, 1) \ + F(IsInitializedIntlObjectOfType, 2, 1) \ + F(MarkAsInitializedIntlObjectOfType, 3, 1) \ + F(GetImplFromInitializedIntlObject, 1, 1) \ \ /* Date format and parse. */ \ F(CreateDateTimeFormat, 3, 1) \ @@ -549,13 +529,14 @@ namespace internal { RUNTIME_FUNCTION_LIST_ALWAYS_1(F) \ RUNTIME_FUNCTION_LIST_ALWAYS_2(F) \ RUNTIME_FUNCTION_LIST_DEBUG(F) \ - RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F) \ + RUNTIME_FUNCTION_LIST_DEBUGGER(F) \ RUNTIME_FUNCTION_LIST_I18N_SUPPORT(F) // RUNTIME_HIDDEN_FUNCTION_LIST defines all runtime functions accessed // by id from code generator, but not via native call by name. // Entries have the form F(name, number of arguments, number of return values). #define RUNTIME_HIDDEN_FUNCTION_LIST(F) \ + /* String and Regexp */ \ F(NumberToString, 1, 1) \ F(RegExpConstructResult, 3, 1) \ F(RegExpExec, 4, 1) \ @@ -563,7 +544,6 @@ namespace internal { F(SubString, 3, 1) \ F(StringCompare, 2, 1) \ F(StringCharCodeAt, 2, 1) \ - F(Log, 3, 1) \ F(GetFromCache, 2, 1) \ \ /* Compilation */ \ @@ -634,7 +614,11 @@ namespace internal { F(InitializeConstContextSlot, 3, 1) \ \ /* Eval */ \ - F(ResolvePossiblyDirectEval, 5, 2) + F(ResolvePossiblyDirectEval, 5, 2) \ + \ + /* Maths */ \ + F(MathPowSlow, 2, 1) \ + F(MathPow, 2, 1) // ---------------------------------------------------------------------------- // INLINE_FUNCTION_LIST defines all inlined functions accessed @@ -663,8 +647,6 @@ namespace internal { F(IsSpecObject, 1, 1) \ F(IsStringWrapperSafeForDefaultValueOf, 1, 1) \ F(MathPow, 2, 1) \ - F(MathSqrt, 1, 1) \ - F(MathLog, 1, 1) \ F(IsMinusZero, 1, 1) \ F(HasCachedArrayIndex, 1, 1) \ F(GetCachedArrayIndex, 1, 1) \ @@ -674,7 +656,6 @@ namespace internal { F(DebugBreakInOptimizedCode, 0, 1) \ F(ClassOf, 1, 1) \ F(StringCharCodeAt, 2, 1) \ - F(Log, 3, 1) \ F(StringAdd, 2, 1) \ F(SubString, 3, 1) \ F(StringCompare, 2, 1) \ @@ -690,13 +671,24 @@ namespace internal { // a corresponding runtime function, that is called from non-optimized code. // Entries have the form F(name, number of arguments, number of return values). #define INLINE_OPTIMIZED_FUNCTION_LIST(F) \ - F(DoubleHi, 1, 1) \ - F(DoubleLo, 1, 1) \ - F(ConstructDouble, 2, 1) \ + /* Typed Arrays */ \ F(TypedArrayInitialize, 5, 1) \ F(DataViewInitialize, 4, 1) \ F(MaxSmi, 0, 1) \ - F(TypedArrayMaxSizeInHeap, 0, 1) + F(TypedArrayMaxSizeInHeap, 0, 1) \ + F(ArrayBufferViewGetByteLength, 1, 1) \ + F(ArrayBufferViewGetByteOffset, 1, 1) \ + F(TypedArrayGetLength, 1, 1) \ + /* ArrayBuffer */ \ + F(ArrayBufferGetByteLength, 1, 1) \ + /* Maths */ \ + F(ConstructDouble, 2, 1) \ + F(DoubleHi, 1, 1) \ + F(DoubleLo, 1, 1) \ + F(MathSqrt, 1, 1) \ + F(MathLog, 1, 1) \ + /* Debugger */ \ + F(DebugCallbackSupportsStepping, 1, 1) //--------------------------------------------------------------------------- @@ -791,11 +783,8 @@ class Runtime : public AllStatic { // Add internalized strings for all the intrinsic function names to a // StringDictionary. - // Returns failure if an allocation fails. In this case, it must be - // retried with a new, empty StringDictionary, not with the same one. - // Alternatively, heap initialization can be completely restarted. 
- MUST_USE_RESULT static MaybeObject* InitializeIntrinsicFunctionNames( - Heap* heap, Object* dictionary); + static void InitializeIntrinsicFunctionNames(Isolate* isolate, + Handle<NameDictionary> dict); // Get the intrinsic function with the given name, which must be internalized. static const Function* FunctionForName(Handle<String> name); @@ -816,11 +805,12 @@ class Runtime : public AllStatic { // Support getting the characters in a string using [] notation as // in Firefox/SpiderMonkey, Safari and Opera. - static Handle<Object> GetElementOrCharAt(Isolate* isolate, - Handle<Object> object, - uint32_t index); + MUST_USE_RESULT static MaybeHandle<Object> GetElementOrCharAt( + Isolate* isolate, + Handle<Object> object, + uint32_t index); - static Handle<Object> SetObjectProperty( + MUST_USE_RESULT static MaybeHandle<Object> SetObjectProperty( Isolate* isolate, Handle<Object> object, Handle<Object> key, @@ -828,30 +818,26 @@ class Runtime : public AllStatic { PropertyAttributes attr, StrictMode strict_mode); - static Handle<Object> ForceSetObjectProperty( - Isolate* isolate, + MUST_USE_RESULT static MaybeHandle<Object> ForceSetObjectProperty( Handle<JSObject> object, Handle<Object> key, Handle<Object> value, - PropertyAttributes attr); + PropertyAttributes attr, + JSReceiver::StoreFromKeyed store_from_keyed + = JSReceiver::MAY_BE_STORE_FROM_KEYED); - MUST_USE_RESULT static MaybeObject* DeleteObjectProperty( + MUST_USE_RESULT static MaybeHandle<Object> DeleteObjectProperty( Isolate* isolate, Handle<JSReceiver> object, Handle<Object> key, JSReceiver::DeleteMode mode); - MUST_USE_RESULT static MaybeObject* HasObjectProperty( + MUST_USE_RESULT static MaybeHandle<Object> HasObjectProperty( Isolate* isolate, Handle<JSReceiver> object, Handle<Object> key); - MUST_USE_RESULT static MaybeObject* GetObjectProperty( - Isolate* isolate, - Handle<Object> object, - Handle<Object> key); - - MUST_USE_RESULT static MaybeObject* GetObjectPropertyOrFail( + MUST_USE_RESULT static MaybeHandle<Object> GetObjectProperty( Isolate* isolate, Handle<Object> object, Handle<Object> key); @@ -884,7 +870,10 @@ class Runtime : public AllStatic { ARRAY_ID_INT32 = 6, ARRAY_ID_FLOAT32 = 7, ARRAY_ID_FLOAT64 = 8, - ARRAY_ID_UINT8_CLAMPED = 9 + ARRAY_ID_UINT8_CLAMPED = 9, + + ARRAY_ID_FIRST = ARRAY_ID_UINT8, + ARRAY_ID_LAST = ARRAY_ID_UINT8_CLAMPED }; static void ArrayIdToTypeAndSize(int array_id, @@ -893,12 +882,8 @@ class Runtime : public AllStatic { ElementsKind* fixed_elements_kind, size_t *element_size); - // Helper functions used stubs. - static void PerformGC(Object* result, Isolate* isolate); - static void OutOfMemory(); - // Used in runtime.cc and hydrogen's VisitArrayLiteral. - static Handle<Object> CreateArrayLiteralBoilerplate( + MUST_USE_RESULT static MaybeHandle<Object> CreateArrayLiteralBoilerplate( Isolate* isolate, Handle<FixedArray> literals, Handle<FixedArray> elements); diff --git a/deps/v8/src/runtime.js b/deps/v8/src/runtime.js index a49bc8448..1dee2e08f 100644 --- a/deps/v8/src/runtime.js +++ b/deps/v8/src/runtime.js @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This files contains runtime support implemented in JavaScript. @@ -464,7 +441,7 @@ function APPLY_PREPARE(args) { } -function APPLY_OVERFLOW(length) { +function STACK_OVERFLOW(length) { throw %MakeRangeError('stack_overflow', []); } diff --git a/deps/v8/src/safepoint-table.cc b/deps/v8/src/safepoint-table.cc index beecb2758..b7833c232 100644 --- a/deps/v8/src/safepoint-table.cc +++ b/deps/v8/src/safepoint-table.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" diff --git a/deps/v8/src/safepoint-table.h b/deps/v8/src/safepoint-table.h index cd094c55b..5a1af55cb 100644 --- a/deps/v8/src/safepoint-table.h +++ b/deps/v8/src/safepoint-table.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SAFEPOINT_TABLE_H_ #define V8_SAFEPOINT_TABLE_H_ diff --git a/deps/v8/src/sampler.cc b/deps/v8/src/sampler.cc index c6830e690..3cb0b749a 100644 --- a/deps/v8/src/sampler.cc +++ b/deps/v8/src/sampler.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "sampler.h" @@ -589,6 +566,7 @@ SamplerThread* SamplerThread::instance_ = NULL; DISABLE_ASAN void TickSample::Init(Isolate* isolate, const RegisterState& regs) { ASSERT(isolate->IsInitialized()); + timestamp = TimeTicks::HighResolutionNow(); pc = regs.pc; state = isolate->current_vm_state(); diff --git a/deps/v8/src/sampler.h b/deps/v8/src/sampler.h index b17a2ed8d..41da7494d 100644 --- a/deps/v8/src/sampler.h +++ b/deps/v8/src/sampler.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SAMPLER_H_ #define V8_SAMPLER_H_ @@ -69,6 +46,7 @@ struct TickSample { }; static const int kMaxFramesCount = 64; Address stack[kMaxFramesCount]; // Call stack. + TimeTicks timestamp; int frames_count : 8; // Number of captured frames. bool has_external_callback : 1; StackFrame::Type top_frame_type : 4; diff --git a/deps/v8/src/scanner-character-streams.cc b/deps/v8/src/scanner-character-streams.cc index cbef3f95b..8fbfe4ee9 100644 --- a/deps/v8/src/scanner-character-streams.cc +++ b/deps/v8/src/scanner-character-streams.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -126,8 +103,6 @@ GenericStringUtf16CharacterStream::GenericStringUtf16CharacterStream( : string_(data), length_(end_position) { ASSERT(end_position >= start_position); - buffer_cursor_ = buffer_; - buffer_end_ = buffer_; pos_ = start_position; } diff --git a/deps/v8/src/scanner-character-streams.h b/deps/v8/src/scanner-character-streams.h index 319ee8fc1..0d02f0201 100644 --- a/deps/v8/src/scanner-character-streams.h +++ b/deps/v8/src/scanner-character-streams.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SCANNER_CHARACTER_STREAMS_H_ #define V8_SCANNER_CHARACTER_STREAMS_H_ @@ -72,7 +49,6 @@ class GenericStringUtf16CharacterStream: public BufferedUtf16CharacterStream { virtual unsigned FillBuffer(unsigned position, unsigned length); Handle<String> string_; - unsigned start_position_; unsigned length_; }; diff --git a/deps/v8/src/scanner.cc b/deps/v8/src/scanner.cc index 48bfd3326..2e039ca40 100644 --- a/deps/v8/src/scanner.cc +++ b/deps/v8/src/scanner.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Features shared by parsing and pre-parsing scanners. 
@@ -1120,10 +1097,10 @@ Handle<String> Scanner::AllocateNextLiteralString(Isolate* isolate, PretenureFlag tenured) { if (is_next_literal_one_byte()) { return isolate->factory()->NewStringFromOneByte( - Vector<const uint8_t>::cast(next_literal_one_byte_string()), tenured); + next_literal_one_byte_string(), tenured).ToHandleChecked(); } else { return isolate->factory()->NewStringFromTwoByte( - next_literal_two_byte_string(), tenured); + next_literal_two_byte_string(), tenured).ToHandleChecked(); } } @@ -1142,7 +1119,8 @@ Handle<String> Scanner::AllocateInternalizedString(Isolate* isolate) { double Scanner::DoubleValue() { ASSERT(is_literal_one_byte()); return StringToDouble( - unicode_cache_, Vector<const char>::cast(literal_one_byte_string()), + unicode_cache_, + literal_one_byte_string(), ALLOW_HEX | ALLOW_OCTAL | ALLOW_IMPLICIT_OCTAL | ALLOW_BINARY); } @@ -1160,15 +1138,6 @@ int Scanner::FindSymbol(DuplicateFinder* finder, int value) { } -void Scanner::LogSymbol(ParserRecorder* log, int position) { - if (is_literal_one_byte()) { - log->LogOneByteSymbol(position, literal_one_byte_string()); - } else { - log->LogTwoByteSymbol(position, literal_two_byte_string()); - } -} - - int DuplicateFinder::AddOneByteSymbol(Vector<const uint8_t> key, int value) { return AddSymbol(key, true, value); } @@ -1201,7 +1170,7 @@ int DuplicateFinder::AddNumber(Vector<const uint8_t> key, int value) { int flags = ALLOW_HEX | ALLOW_OCTAL | ALLOW_IMPLICIT_OCTAL | ALLOW_BINARY; double double_value = StringToDouble( - unicode_constants_, Vector<const char>::cast(key), flags, 0.0); + unicode_constants_, key, flags, 0.0); int length; const char* string; if (!std::isfinite(double_value)) { diff --git a/deps/v8/src/scanner.h b/deps/v8/src/scanner.h index 73026ab5a..037da5b17 100644 --- a/deps/v8/src/scanner.h +++ b/deps/v8/src/scanner.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
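In the scanner.cc hunk just above, the factory string constructors now get .ToHandleChecked() appended, i.e. they return a maybe-style value that the caller has to unwrap explicitly instead of receiving a plain handle. A minimal standalone sketch of that calling convention follows; the Maybe type, ToValue/ToValueChecked, and ParseDigit are hypothetical stand-ins for illustration only, not V8's MaybeHandle API:

// Illustrative only -- the same shape of API as the hunk above, not V8's.
// A "maybe" wrapper forces callers to decide what happens on failure,
// either by checking ToValue(&out) or by asserting with ToValueChecked().
#include <cassert>
#include <cstdlib>

template <typename T>
class Maybe {
 public:
  Maybe() : has_value_(false), value_() {}
  explicit Maybe(const T& value) : has_value_(true), value_(value) {}

  // Returns true and writes the value if present; the caller handles failure.
  bool ToValue(T* out) const {
    if (!has_value_) return false;
    *out = value_;
    return true;
  }

  // Aborts if empty -- the moral equivalent of ToHandleChecked() above:
  // use it only where failure is impossible by construction.
  T ToValueChecked() const {
    if (!has_value_) std::abort();
    return value_;
  }

 private:
  bool has_value_;
  T value_;
};

Maybe<int> ParseDigit(char c) {
  if (c >= '0' && c <= '9') return Maybe<int>(c - '0');
  return Maybe<int>();
}

int main() {
  int digit = ParseDigit('7').ToValueChecked();  // Known-good input.
  assert(digit == 7);

  int ignored;
  assert(!ParseDigit('x').ToValue(&ignored));    // Failure must be handled.
  return 0;
}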
// Features shared by parsing and pre-parsing scanners. @@ -428,8 +405,6 @@ class Scanner { int FindNumber(DuplicateFinder* finder, int value); int FindSymbol(DuplicateFinder* finder, int value); - void LogSymbol(ParserRecorder* log, int position); - UnicodeCache* unicode_cache() { return unicode_cache_; } // Returns the location of the last seen octal literal. diff --git a/deps/v8/src/scopeinfo.cc b/deps/v8/src/scopeinfo.cc index e2ae85432..1ed7e0b77 100644 --- a/deps/v8/src/scopeinfo.cc +++ b/deps/v8/src/scopeinfo.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdlib.h> @@ -278,6 +255,17 @@ InitializationFlag ScopeInfo::ContextLocalInitFlag(int var) { } +bool ScopeInfo::LocalIsSynthetic(int var) { + ASSERT(0 <= var && var < LocalCount()); + // There's currently no flag stored on the ScopeInfo to indicate that a + // variable is a compiler-introduced temporary. However, to avoid conflict + // with user declarations, the current temporaries like .generator_object and + // .result start with a dot, so we can use that as a flag. It's a hack! 
+ Handle<String> name(LocalName(var)); + return name->length() > 0 && name->Get(0) == '.'; +} + + int ScopeInfo::StackSlotIndex(String* name) { ASSERT(name->IsInternalizedString()); if (length() > 0) { @@ -293,35 +281,40 @@ int ScopeInfo::StackSlotIndex(String* name) { } -int ScopeInfo::ContextSlotIndex(String* name, +int ScopeInfo::ContextSlotIndex(Handle<ScopeInfo> scope_info, + Handle<String> name, VariableMode* mode, InitializationFlag* init_flag) { ASSERT(name->IsInternalizedString()); ASSERT(mode != NULL); ASSERT(init_flag != NULL); - if (length() > 0) { - ContextSlotCache* context_slot_cache = GetIsolate()->context_slot_cache(); - int result = context_slot_cache->Lookup(this, name, mode, init_flag); + if (scope_info->length() > 0) { + ContextSlotCache* context_slot_cache = + scope_info->GetIsolate()->context_slot_cache(); + int result = + context_slot_cache->Lookup(*scope_info, *name, mode, init_flag); if (result != ContextSlotCache::kNotFound) { - ASSERT(result < ContextLength()); + ASSERT(result < scope_info->ContextLength()); return result; } - int start = ContextLocalNameEntriesIndex(); - int end = ContextLocalNameEntriesIndex() + ContextLocalCount(); + int start = scope_info->ContextLocalNameEntriesIndex(); + int end = scope_info->ContextLocalNameEntriesIndex() + + scope_info->ContextLocalCount(); for (int i = start; i < end; ++i) { - if (name == get(i)) { + if (*name == scope_info->get(i)) { int var = i - start; - *mode = ContextLocalMode(var); - *init_flag = ContextLocalInitFlag(var); + *mode = scope_info->ContextLocalMode(var); + *init_flag = scope_info->ContextLocalInitFlag(var); result = Context::MIN_CONTEXT_SLOTS + var; - context_slot_cache->Update(this, name, *mode, *init_flag, result); - ASSERT(result < ContextLength()); + context_slot_cache->Update(scope_info, name, *mode, *init_flag, result); + ASSERT(result < scope_info->ContextLength()); return result; } } // Cache as not found. Mode and init flag don't matter. - context_slot_cache->Update(this, name, INTERNAL, kNeedsInitialization, -1); + context_slot_cache->Update( + scope_info, name, INTERNAL, kNeedsInitialization, -1); } return -1; } @@ -368,18 +361,21 @@ bool ScopeInfo::CopyContextLocalsToScopeObject(Handle<ScopeInfo> scope_info, int local_count = scope_info->ContextLocalCount(); if (local_count == 0) return true; // Fill all context locals to the context extension. 
+ int first_context_var = scope_info->StackLocalCount(); int start = scope_info->ContextLocalNameEntriesIndex(); - int end = start + local_count; - for (int i = start; i < end; ++i) { - int context_index = Context::MIN_CONTEXT_SLOTS + i - start; - Handle<Object> result = Runtime::SetObjectProperty( + for (int i = 0; i < local_count; ++i) { + if (scope_info->LocalIsSynthetic(first_context_var + i)) continue; + int context_index = Context::MIN_CONTEXT_SLOTS + i; + RETURN_ON_EXCEPTION_VALUE( isolate, - scope_object, - Handle<String>(String::cast(scope_info->get(i))), - Handle<Object>(context->get(context_index), isolate), - ::NONE, - SLOPPY); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, false); + Runtime::SetObjectProperty( + isolate, + scope_object, + Handle<String>(String::cast(scope_info->get(i + start))), + Handle<Object>(context->get(context_index), isolate), + ::NONE, + SLOPPY), + false); } return true; } @@ -435,19 +431,20 @@ int ContextSlotCache::Lookup(Object* data, } -void ContextSlotCache::Update(Object* data, - String* name, +void ContextSlotCache::Update(Handle<Object> data, + Handle<String> name, VariableMode mode, InitializationFlag init_flag, int slot_index) { - String* internalized_name; + DisallowHeapAllocation no_gc; + Handle<String> internalized_name; ASSERT(slot_index > kNotFound); - if (name->GetIsolate()->heap()->InternalizeStringIfExists( - name, &internalized_name)) { - int index = Hash(data, internalized_name); + if (StringTable::InternalizeStringIfExists(name->GetIsolate(), name). + ToHandle(&internalized_name)) { + int index = Hash(*data, *internalized_name); Key& key = keys_[index]; - key.data = data; - key.name = internalized_name; + key.data = *data; + key.name = *internalized_name; // Please note value only takes a uint as index. values_[index] = Value(mode, init_flag, slot_index - kNotFound).raw(); #ifdef DEBUG @@ -464,18 +461,19 @@ void ContextSlotCache::Clear() { #ifdef DEBUG -void ContextSlotCache::ValidateEntry(Object* data, - String* name, +void ContextSlotCache::ValidateEntry(Handle<Object> data, + Handle<String> name, VariableMode mode, InitializationFlag init_flag, int slot_index) { - String* internalized_name; - if (name->GetIsolate()->heap()->InternalizeStringIfExists( - name, &internalized_name)) { - int index = Hash(data, name); + DisallowHeapAllocation no_gc; + Handle<String> internalized_name; + if (StringTable::InternalizeStringIfExists(name->GetIsolate(), name). + ToHandle(&internalized_name)) { + int index = Hash(*data, *name); Key& key = keys_[index]; - ASSERT(key.data == data); - ASSERT(key.name->Equals(name)); + ASSERT(key.data == *data); + ASSERT(key.name->Equals(*name)); Value result(values_[index]); ASSERT(result.mode() == mode); ASSERT(result.initialization_flag() == init_flag); diff --git a/deps/v8/src/scopeinfo.h b/deps/v8/src/scopeinfo.h index a884b3b9e..755b6a310 100644 --- a/deps/v8/src/scopeinfo.h +++ b/deps/v8/src/scopeinfo.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SCOPEINFO_H_ #define V8_SCOPEINFO_H_ @@ -49,8 +26,8 @@ class ContextSlotCache { InitializationFlag* init_flag); // Update an element in the cache. - void Update(Object* data, - String* name, + void Update(Handle<Object> data, + Handle<String> name, VariableMode mode, InitializationFlag init_flag, int slot_index); @@ -72,8 +49,8 @@ class ContextSlotCache { inline static int Hash(Object* data, String* name); #ifdef DEBUG - void ValidateEntry(Object* data, - String* name, + void ValidateEntry(Handle<Object> data, + Handle<String> name, VariableMode mode, InitializationFlag init_flag, int slot_index); diff --git a/deps/v8/src/scopes.cc b/deps/v8/src/scopes.cc index bcb643501..1818909af 100644 --- a/deps/v8/src/scopes.cc +++ b/deps/v8/src/scopes.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -402,7 +379,7 @@ Variable* Scope::LocalLookup(Handle<String> name) { VariableMode mode; Variable::Location location = Variable::CONTEXT; InitializationFlag init_flag; - int index = scope_info_->ContextSlotIndex(*name, &mode, &init_flag); + int index = ScopeInfo::ContextSlotIndex(scope_info_, name, &mode, &init_flag); if (index < 0) { // Check parameters. index = scope_info_->ParameterIndex(*name); @@ -1114,7 +1091,7 @@ bool Scope::ResolveVariable(CompilationInfo* info, Isolate* isolate = info->isolate(); Factory* factory = isolate->factory(); Handle<JSArray> array = factory->NewJSArray(1); - USE(JSObject::SetElement(array, 0, var->name(), NONE, STRICT)); + JSObject::SetElement(array, 0, var->name(), NONE, STRICT).Assert(); Handle<Object> result = factory->NewSyntaxError("module_type_error", array); isolate->Throw(*result, &location); @@ -1266,7 +1243,7 @@ void Scope::AllocateParameterLocals() { for (int i = params_.length() - 1; i >= 0; --i) { Variable* var = params_[i]; ASSERT(var->scope() == this); - if (uses_sloppy_arguments) { + if (uses_sloppy_arguments || has_forced_context_allocation()) { // Force context allocation of the parameter. var->ForceContextAllocation(); } diff --git a/deps/v8/src/scopes.h b/deps/v8/src/scopes.h index b0d84343e..23a10f19c 100644 --- a/deps/v8/src/scopes.h +++ b/deps/v8/src/scopes.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SCOPES_H_ #define V8_SCOPES_H_ diff --git a/deps/v8/src/serialize.cc b/deps/v8/src/serialize.cc index 4048886fd..2b43c0ee6 100644 --- a/deps/v8/src/serialize.cc +++ b/deps/v8/src/serialize.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -208,25 +185,15 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { isolate); } -#ifdef ENABLE_DEBUGGER_SUPPORT // Debug addresses Add(Debug_Address(Debug::k_after_break_target_address).address(isolate), DEBUG_ADDRESS, Debug::k_after_break_target_address << kDebugIdShift, "Debug::after_break_target_address()"); - Add(Debug_Address(Debug::k_debug_break_slot_address).address(isolate), - DEBUG_ADDRESS, - Debug::k_debug_break_slot_address << kDebugIdShift, - "Debug::debug_break_slot_address()"); - Add(Debug_Address(Debug::k_debug_break_return_address).address(isolate), - DEBUG_ADDRESS, - Debug::k_debug_break_return_address << kDebugIdShift, - "Debug::debug_break_return_address()"); Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(isolate), DEBUG_ADDRESS, Debug::k_restarter_frame_function_pointer << kDebugIdShift, "Debug::restarter_frame_function_pointer_address()"); -#endif // Stat counters struct StatsRefTableEntry { @@ -271,14 +238,17 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { } // Accessors -#define ACCESSOR_DESCRIPTOR_DECLARATION(name) \ - Add((Address)&Accessors::name, \ +#define ACCESSOR_INFO_DECLARATION(name) \ + Add(FUNCTION_ADDR(&Accessors::name##Getter), \ ACCESSOR, \ - Accessors::k##name, \ - "Accessors::" #name); - - ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION) -#undef ACCESSOR_DESCRIPTOR_DECLARATION + Accessors::k##name##Getter, \ + "Accessors::" #name "Getter"); \ + Add(FUNCTION_ADDR(&Accessors::name##Setter), \ + ACCESSOR, \ + Accessors::k##name##Setter, \ + "Accessors::" #name "Setter"); + ACCESSOR_INFO_LIST(ACCESSOR_INFO_DECLARATION) +#undef ACCESSOR_INFO_DECLARATION StubCache* stub_cache = isolate->stub_cache(); @@ -309,15 +279,6 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { "StubCache::secondary_->map"); // Runtime entries - 
Add(ExternalReference::perform_gc_function(isolate).address(), - RUNTIME_ENTRY, - 1, - "Runtime::PerformGC"); - // Runtime entries - Add(ExternalReference::out_of_memory_function(isolate).address(), - RUNTIME_ENTRY, - 2, - "Runtime::OutOfMemory"); Add(ExternalReference::delete_handle_scope_extensions(isolate).address(), RUNTIME_ENTRY, 4, @@ -372,10 +333,6 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { UNCLASSIFIED, 11, "Heap::NewSpaceMask()"); - Add(ExternalReference::heap_always_allocate_scope_depth(isolate).address(), - UNCLASSIFIED, - 12, - "Heap::always_allocate_scope_depth()"); Add(ExternalReference::new_space_allocation_limit_address(isolate).address(), UNCLASSIFIED, 14, @@ -384,7 +341,6 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { UNCLASSIFIED, 15, "Heap::NewSpaceAllocationTopAddress()"); -#ifdef ENABLE_DEBUGGER_SUPPORT Add(ExternalReference::debug_break(isolate).address(), UNCLASSIFIED, 16, @@ -393,7 +349,6 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { UNCLASSIFIED, 17, "Debug::step_in_fp_addr()"); -#endif Add(ExternalReference::mod_two_doubles_operation(isolate).address(), UNCLASSIFIED, 22, @@ -560,6 +515,26 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { 62, "Code::MarkCodeAsExecuted"); + Add(ExternalReference::is_profiling_address(isolate).address(), + UNCLASSIFIED, + 63, + "CpuProfiler::is_profiling"); + + Add(ExternalReference::scheduled_exception_address(isolate).address(), + UNCLASSIFIED, + 64, + "Isolate::scheduled_exception"); + + Add(ExternalReference::invoke_function_callback(isolate).address(), + UNCLASSIFIED, + 65, + "InvokeFunctionCallback"); + + Add(ExternalReference::invoke_accessor_getter_callback(isolate).address(), + UNCLASSIFIED, + 66, + "InvokeAccessorGetterCallback"); + // Add a small set of deopt entry addresses to encoder without generating the // deopt table code, which isn't possible at deserialization time. 
HandleScope scope(isolate); @@ -575,7 +550,7 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { ExternalReferenceEncoder::ExternalReferenceEncoder(Isolate* isolate) - : encodings_(Match), + : encodings_(HashMap::PointersMatch), isolate_(isolate) { ExternalReferenceTable* external_references = ExternalReferenceTable::instance(isolate_); @@ -638,10 +613,7 @@ ExternalReferenceDecoder::~ExternalReferenceDecoder() { DeleteArray(encodings_); } - -bool Serializer::serialization_enabled_ = false; -bool Serializer::too_late_to_enable_now_ = false; - +AtomicWord Serializer::serialization_state_ = SERIALIZER_STATE_UNINITIALIZED; class CodeAddressMap: public CodeEventLogger { public: @@ -669,7 +641,7 @@ class CodeAddressMap: public CodeEventLogger { private: class NameMap { public: - NameMap() : impl_(&PointerEquals) {} + NameMap() : impl_(HashMap::PointersMatch) {} ~NameMap() { for (HashMap::Entry* p = impl_.Start(); p != NULL; p = impl_.Next(p)) { @@ -709,10 +681,6 @@ class CodeAddressMap: public CodeEventLogger { } private: - static bool PointerEquals(void* lhs, void* rhs) { - return lhs == rhs; - } - static char* CopyName(const char* name, int name_size) { char* result = NewArray<char>(name_size + 1); for (int i = 0; i < name_size; ++i) { @@ -758,22 +726,42 @@ class CodeAddressMap: public CodeEventLogger { CodeAddressMap* Serializer::code_address_map_ = NULL; -void Serializer::Enable(Isolate* isolate) { - if (!serialization_enabled_) { - ASSERT(!too_late_to_enable_now_); - } - if (serialization_enabled_) return; - serialization_enabled_ = true; +void Serializer::RequestEnable(Isolate* isolate) { isolate->InitializeLoggingAndCounters(); code_address_map_ = new CodeAddressMap(isolate); } -void Serializer::Disable() { - if (!serialization_enabled_) return; - serialization_enabled_ = false; - delete code_address_map_; - code_address_map_ = NULL; +void Serializer::InitializeOncePerProcess() { + // InitializeOncePerProcess is called by V8::InitializeOncePerProcess, a + // method guaranteed to be called only once in a process lifetime. + // serialization_state_ is read by many threads, hence the use of + // Atomic primitives. Here, we don't need a barrier or mutex to + // write it because V8 initialization is done by one thread, and gates + // all reads of serialization_state_. + ASSERT(NoBarrier_Load(&serialization_state_) == + SERIALIZER_STATE_UNINITIALIZED); + SerializationState state = code_address_map_ + ? SERIALIZER_STATE_ENABLED + : SERIALIZER_STATE_DISABLED; + NoBarrier_Store(&serialization_state_, state); +} + + +void Serializer::TearDown() { + // TearDown is called by V8::TearDown() for the default isolate. It's safe + // to shut down the serializer by that point. Just to be safe, we restore + // serialization_state_ to uninitialized. + ASSERT(NoBarrier_Load(&serialization_state_) != + SERIALIZER_STATE_UNINITIALIZED); + if (code_address_map_) { + ASSERT(NoBarrier_Load(&serialization_state_) == + SERIALIZER_STATE_ENABLED); + delete code_address_map_; + code_address_map_ = NULL; + } + + NoBarrier_Store(&serialization_state_, SERIALIZER_STATE_UNINITIALIZED); } @@ -865,7 +853,8 @@ void Deserializer::DeserializePartial(Isolate* isolate, Object** root) { Deserializer::~Deserializer() { - ASSERT(source_->AtEOF()); + // TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed. 
+ // ASSERT(source_->AtEOF()); if (external_reference_decoder_) { delete external_reference_decoder_; external_reference_decoder_ = NULL; @@ -1349,7 +1338,7 @@ void Serializer::VisitPointers(Object** start, Object** end) { // deserialized objects. void SerializerDeserializer::Iterate(Isolate* isolate, ObjectVisitor* visitor) { - if (Serializer::enabled()) return; + if (Serializer::enabled(isolate)) return; for (int i = 0; ; i++) { if (isolate->serialize_partial_snapshot_cache_length() <= i) { // Extend the array ready to get a value from the visitor when diff --git a/deps/v8/src/serialize.h b/deps/v8/src/serialize.h index 294714475..958f20e24 100644 --- a/deps/v8/src/serialize.h +++ b/deps/v8/src/serialize.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SERIALIZE_H_ #define V8_SERIALIZE_H_ @@ -124,8 +101,6 @@ class ExternalReferenceEncoder { int IndexOf(Address key) const; - static bool Match(void* key1, void* key2) { return key1 == key2; } - void Put(Address key, int index); Isolate* isolate_; @@ -414,7 +389,7 @@ class SerializationAddressMapper { public: SerializationAddressMapper() : no_allocation_(), - serialization_map_(new HashMap(&SerializationMatchFun)) { } + serialization_map_(new HashMap(HashMap::PointersMatch)) { } ~SerializationAddressMapper() { delete serialization_map_; @@ -438,10 +413,6 @@ class SerializationAddressMapper { } private: - static bool SerializationMatchFun(void* key1, void* key2) { - return key1 == key2; - } - static uint32_t Hash(HeapObject* obj) { return static_cast<int32_t>(reinterpret_cast<intptr_t>(obj->address())); } @@ -470,19 +441,22 @@ class Serializer : public SerializerDeserializer { void VisitPointers(Object** start, Object** end); // You can call this after serialization to find out how much space was used // in each space. 
- int CurrentAllocationAddress(int space) { + int CurrentAllocationAddress(int space) const { ASSERT(space < kNumberOfSpaces); return fullness_[space]; } Isolate* isolate() const { return isolate_; } - static void Enable(Isolate* isolate); - static void Disable(); - - // Call this when you have made use of the fact that there is no serialization - // going on. - static void TooLateToEnableNow() { too_late_to_enable_now_ = true; } - static bool enabled() { return serialization_enabled_; } + static void RequestEnable(Isolate* isolate); + static void InitializeOncePerProcess(); + static void TearDown(); + + static bool enabled(Isolate* isolate) { + SerializationState state = static_cast<SerializationState>( + NoBarrier_Load(&serialization_state_)); + ASSERT(state != SERIALIZER_STATE_UNINITIALIZED); + return state == SERIALIZER_STATE_ENABLED; + } SerializationAddressMapper* address_mapper() { return &address_mapper_; } void PutRoot(int index, HeapObject* object, @@ -580,9 +554,15 @@ class Serializer : public SerializerDeserializer { int fullness_[LAST_SPACE + 1]; SnapshotByteSink* sink_; ExternalReferenceEncoder* external_reference_encoder_; - static bool serialization_enabled_; - // Did we already make use of the fact that serialization was not enabled? - static bool too_late_to_enable_now_; + + enum SerializationState { + SERIALIZER_STATE_UNINITIALIZED = 0, + SERIALIZER_STATE_DISABLED = 1, + SERIALIZER_STATE_ENABLED = 2 + }; + + static AtomicWord serialization_state_; + SerializationAddressMapper address_mapper_; intptr_t root_index_wave_front_; void Pad(); diff --git a/deps/v8/src/simulator.h b/deps/v8/src/simulator.h index b61eaa260..910e1674c 100644 --- a/deps/v8/src/simulator.h +++ b/deps/v8/src/simulator.h @@ -1,29 +1,6 @@ // Copyright 2009 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
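The serialize.cc and serialize.h hunks above replace the serializer's two static booleans with a single AtomicWord holding an uninitialized/disabled/enabled state, written once during single-threaded initialization and afterwards only read. A minimal standalone sketch of that tri-state-flag pattern using std::atomic follows; the enum values mirror the ones in the diff, but the functions and atomics usage are illustrative only, not V8's atomicops API:

// Standalone sketch of the pattern in the serialize.cc hunks above: one
// atomic word replaces two booleans, set once before any reader thread
// exists and then only read. Names and signatures are illustrative.
#include <atomic>
#include <cassert>

enum SerializationState {
  STATE_UNINITIALIZED = 0,
  STATE_DISABLED = 1,
  STATE_ENABLED = 2
};

static std::atomic<int> g_state(STATE_UNINITIALIZED);

// Called exactly once, during single-threaded startup, so a relaxed store
// is enough: initialization itself gates all subsequent reads.
void InitializeOncePerProcess(bool enable) {
  assert(g_state.load(std::memory_order_relaxed) == STATE_UNINITIALIZED);
  g_state.store(enable ? STATE_ENABLED : STATE_DISABLED,
                std::memory_order_relaxed);
}

bool SerializationEnabled() {
  int state = g_state.load(std::memory_order_relaxed);
  assert(state != STATE_UNINITIALIZED);  // Readers must come after init.
  return state == STATE_ENABLED;
}

int main() {
  InitializeOncePerProcess(true);
  assert(SerializationEnabled());
  return 0;
}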
#ifndef V8_SIMULATOR_H_ #define V8_SIMULATOR_H_ diff --git a/deps/v8/src/small-pointer-list.h b/deps/v8/src/small-pointer-list.h index 295a06f26..2359a8b9f 100644 --- a/deps/v8/src/small-pointer-list.h +++ b/deps/v8/src/small-pointer-list.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SMALL_POINTER_LIST_H_ #define V8_SMALL_POINTER_LIST_H_ diff --git a/deps/v8/src/smart-pointers.h b/deps/v8/src/smart-pointers.h index 7203c16ba..db2206a32 100644 --- a/deps/v8/src/smart-pointers.h +++ b/deps/v8/src/smart-pointers.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SMART_POINTERS_H_ #define V8_SMART_POINTERS_H_ diff --git a/deps/v8/src/snapshot-common.cc b/deps/v8/src/snapshot-common.cc index 4bdf63ced..b22f2902d 100644 --- a/deps/v8/src/snapshot-common.cc +++ b/deps/v8/src/snapshot-common.cc @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // The common functionality when building with or without snapshots. diff --git a/deps/v8/src/snapshot-empty.cc b/deps/v8/src/snapshot-empty.cc index 54236d82e..62b77fe35 100644 --- a/deps/v8/src/snapshot-empty.cc +++ b/deps/v8/src/snapshot-empty.cc @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Used for building without snapshots. diff --git a/deps/v8/src/snapshot.h b/deps/v8/src/snapshot.h index 4041f2925..d5271b2da 100644 --- a/deps/v8/src/snapshot.h +++ b/deps/v8/src/snapshot.h @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "isolate.h" diff --git a/deps/v8/src/spaces-inl.h b/deps/v8/src/spaces-inl.h index 778cb18d7..da9c03d91 100644 --- a/deps/v8/src/spaces-inl.h +++ b/deps/v8/src/spaces-inl.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SPACES_INL_H_ #define V8_SPACES_INL_H_ @@ -274,7 +251,7 @@ HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { // Raw allocation. -MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) { +AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) { HeapObject* object = AllocateLinearly(size_in_bytes); if (object != NULL) { if (identity() == CODE_SPACE) { @@ -303,7 +280,7 @@ MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) { return object; } - return Failure::RetryAfterGC(identity()); + return AllocationResult::Retry(identity()); } @@ -311,7 +288,7 @@ MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) { // NewSpace -MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) { +AllocationResult NewSpace::AllocateRaw(int size_in_bytes) { Address old_top = allocation_info_.top(); #ifdef DEBUG // If we are stressing compaction we waste some memory in new space diff --git a/deps/v8/src/spaces.cc b/deps/v8/src/spaces.cc index 6c03daa75..8e923af54 100644 --- a/deps/v8/src/spaces.cc +++ b/deps/v8/src/spaces.cc @@ -1,32 +1,10 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" +#include "full-codegen.h" #include "macro-assembler.h" #include "mark-compact.h" #include "msan.h" @@ -133,9 +111,19 @@ CodeRange::CodeRange(Isolate* isolate) } -bool CodeRange::SetUp(const size_t requested) { +bool CodeRange::SetUp(size_t requested) { ASSERT(code_range_ == NULL); + if (requested == 0) { + // On 64-bit platform(s), we put all code objects in a 512 MB range of + // virtual address space, so that they can call each other with near calls. + if (kIs64BitArch) { + requested = 512 * MB; + } else { + return true; + } + } + code_range_ = new VirtualMemory(requested); CHECK(code_range_ != NULL); if (!code_range_->IsReserved()) { @@ -146,7 +134,8 @@ bool CodeRange::SetUp(const size_t requested) { // We are sure that we have mapped a block of requested addresses. ASSERT(code_range_->size() == requested); - LOG(isolate_, NewEvent("CodeRange", code_range_->address(), requested)); + LOG(isolate_, + NewEvent("CodeRange", code_range_->address(), requested)); Address base = reinterpret_cast<Address>(code_range_->address()); Address aligned_base = RoundUp(reinterpret_cast<Address>(code_range_->address()), @@ -570,10 +559,6 @@ void MemoryChunk::InsertAfter(MemoryChunk* other) { void MemoryChunk::Unlink() { - if (!InNewSpace() && IsFlagSet(SCAN_ON_SCAVENGE)) { - heap_->decrement_scan_on_scavenge_pages(); - ClearFlag(SCAN_ON_SCAVENGE); - } MemoryChunk* next_element = next_chunk(); MemoryChunk* prev_element = prev_chunk(); next_element->set_prev_chunk(prev_element); @@ -711,7 +696,7 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t reserve_area_size, executable, owner); result->set_reserved_memory(&reservation); - MSAN_MEMORY_IS_INITIALIZED(base, chunk_size); + MSAN_MEMORY_IS_INITIALIZED_IN_JIT(base, chunk_size); return result; } @@ -941,8 +926,8 @@ PagedSpace::PagedSpace(Heap* heap, : Space(heap, id, executable), free_list_(this), was_swept_conservatively_(false), - first_unswept_page_(Page::FromAddress(NULL)), - unswept_free_bytes_(0) { + unswept_free_bytes_(0), + end_of_unswept_pages_(NULL) { if (id == CODE_SPACE) { area_size_ = heap->isolate()->memory_allocator()-> CodePageAreaSize(); @@ -993,11 +978,11 @@ size_t PagedSpace::CommittedPhysicalMemory() { } -MaybeObject* PagedSpace::FindObject(Address addr) { +Object* PagedSpace::FindObject(Address addr) { // Note: this function can only be called on precisely swept spaces. ASSERT(!heap()->mark_compact_collector()->in_use()); - if (!Contains(addr)) return Failure::Exception(); + if (!Contains(addr)) return Smi::FromInt(0); // Signaling not found. 
Page* p = Page::FromAddress(addr); HeapObjectIterator it(p, NULL); @@ -1008,7 +993,7 @@ MaybeObject* PagedSpace::FindObject(Address addr) { } UNREACHABLE(); - return Failure::Exception(); + return Smi::FromInt(0); } @@ -1051,7 +1036,7 @@ intptr_t PagedSpace::SizeOfFirstPage() { int size = 0; switch (identity()) { case OLD_POINTER_SPACE: - size = 72 * kPointerSize * KB; + size = 96 * kPointerSize * KB; break; case OLD_DATA_SPACE: size = 192 * KB; @@ -1072,7 +1057,9 @@ intptr_t PagedSpace::SizeOfFirstPage() { // upgraded to handle small pages. size = AreaSize(); } else { - size = 480 * KB; + size = RoundUp( + 480 * KB * FullCodeGenerator::kBootCodeSizeMultiplier / 100, + kPointerSize); } break; default: @@ -1115,18 +1102,10 @@ void PagedSpace::IncreaseCapacity(int size) { } -void PagedSpace::ReleasePage(Page* page, bool unlink) { +void PagedSpace::ReleasePage(Page* page) { ASSERT(page->LiveBytes() == 0); ASSERT(AreaSize() == page->area_size()); - // Adjust list of unswept pages if the page is the head of the list. - if (first_unswept_page_ == page) { - first_unswept_page_ = page->next_page(); - if (first_unswept_page_ == anchor()) { - first_unswept_page_ = Page::FromAddress(NULL); - } - } - if (page->WasSwept()) { intptr_t size = free_list_.EvictFreeListItems(page); accounting_stats_.AllocateBytes(size); @@ -1135,19 +1114,19 @@ void PagedSpace::ReleasePage(Page* page, bool unlink) { DecreaseUnsweptFreeBytes(page); } - // TODO(hpayer): This check is just used for debugging purpose and - // should be removed or turned into an assert after investigating the - // crash in concurrent sweeping. - CHECK(!free_list_.ContainsPageFreeListItems(page)); + if (page->IsFlagSet(MemoryChunk::SCAN_ON_SCAVENGE)) { + heap()->decrement_scan_on_scavenge_pages(); + page->ClearFlag(MemoryChunk::SCAN_ON_SCAVENGE); + } + + ASSERT(!free_list_.ContainsPageFreeListItems(page)); if (Page::FromAllocationTop(allocation_info_.top()) == page) { allocation_info_.set_top(NULL); allocation_info_.set_limit(NULL); } - if (unlink) { - page->Unlink(); - } + page->Unlink(); if (page->IsFlagSet(MemoryChunk::CONTAINS_ONLY_DATA)) { heap()->isolate()->memory_allocator()->Free(page); } else { @@ -1195,7 +1174,7 @@ void PagedSpace::Verify(ObjectVisitor* visitor) { VerifyObject(object); // The object itself should look OK. - object->Verify(); + object->ObjectVerify(); // All the interior pointers should be contained in the heap. int size = object->Size(); @@ -1420,7 +1399,7 @@ bool NewSpace::AddFreshPage() { } -MaybeObject* NewSpace::SlowAllocateRaw(int size_in_bytes) { +AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes) { Address old_top = allocation_info_.top(); Address high = to_space_.page_high(); if (allocation_info_.limit() < high) { @@ -1442,7 +1421,7 @@ MaybeObject* NewSpace::SlowAllocateRaw(int size_in_bytes) { top_on_previous_step_ = to_space_.page_low(); return AllocateRaw(size_in_bytes); } else { - return Failure::RetryAfterGC(); + return AllocationResult::Retry(); } } @@ -1478,7 +1457,7 @@ void NewSpace::Verify() { CHECK(!object->IsCode()); // The object itself should look OK. - object->Verify(); + object->ObjectVerify(); // All the interior pointers should be contained in the heap. VerifyPointersVisitor visitor; @@ -2017,10 +1996,13 @@ void FreeListNode::set_size(Heap* heap, int size_in_bytes) { // field and a next pointer, we give it a filler map that gives it the // correct size. 
if (size_in_bytes > FreeSpace::kHeaderSize) { - set_map_no_write_barrier(heap->raw_unchecked_free_space_map()); // Can't use FreeSpace::cast because it fails during deserialization. + // We have to set the size first with a release store before we store + // the map because a concurrent store buffer scan on scavenge must not + // observe a map with an invalid size. FreeSpace* this_as_free_space = reinterpret_cast<FreeSpace*>(this); - this_as_free_space->set_size(size_in_bytes); + this_as_free_space->nobarrier_set_size(size_in_bytes); + synchronized_set_map_no_write_barrier(heap->raw_unchecked_free_space_map()); } else if (size_in_bytes == kPointerSize) { set_map_no_write_barrier(heap->raw_unchecked_one_pointer_filler_map()); } else if (size_in_bytes == 2 * kPointerSize) { @@ -2064,11 +2046,11 @@ void FreeListNode::set_next(FreeListNode* next) { // stage. if (map() == GetHeap()->raw_unchecked_free_space_map()) { ASSERT(map() == NULL || Size() >= kNextOffset + kPointerSize); - Memory::Address_at(address() + kNextOffset) = - reinterpret_cast<Address>(next); + NoBarrier_Store(reinterpret_cast<AtomicWord*>(address() + kNextOffset), + reinterpret_cast<AtomicWord>(next)); } else { - Memory::Address_at(address() + kPointerSize) = - reinterpret_cast<Address>(next); + NoBarrier_Store(reinterpret_cast<AtomicWord*>(address() + kPointerSize), + reinterpret_cast<AtomicWord>(next)); } } @@ -2488,7 +2470,7 @@ intptr_t FreeListCategory::SumFreeList() { while (cur != NULL) { ASSERT(cur->map() == cur->GetHeap()->raw_unchecked_free_space_map()); FreeSpace* cur_as_free_space = reinterpret_cast<FreeSpace*>(cur); - sum += cur_as_free_space->Size(); + sum += cur_as_free_space->nobarrier_size(); cur = cur->next(); } return sum; @@ -2540,24 +2522,8 @@ void PagedSpace::PrepareForMarkCompact() { // on the first allocation after the sweep. EmptyAllocationInfo(); - // Stop lazy sweeping and clear marking bits for unswept pages. - if (first_unswept_page_ != NULL) { - Page* p = first_unswept_page_; - do { - // Do not use ShouldBeSweptLazily predicate here. - // New evacuation candidates were selected but they still have - // to be swept before collection starts. - if (!p->WasSwept()) { - Bitmap::Clear(p); - if (FLAG_gc_verbose) { - PrintF("Sweeping 0x%" V8PRIxPTR " lazily abandoned.\n", - reinterpret_cast<intptr_t>(p)); - } - } - p = p->next_page(); - } while (p != anchor()); - } - first_unswept_page_ = Page::FromAddress(NULL); + // This counter will be increased for pages which will be swept by the + // sweeper threads. unswept_free_bytes_ = 0; // Clear the free list before a full GC---it will be rebuilt afterward. 
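The FreeListNode::set_size hunk above stores the free-space size with a plain (no-barrier) store and only afterwards installs the free-space map with a synchronized store, so that a concurrent store-buffer scan on scavenge never observes the map paired with a stale size. A minimal sketch of that writer/reader pairing, using std::atomic in place of V8's NoBarrier_Store and Acquire_Load wrappers; the FreeChunk type and the function names below are illustrative only:

#include <atomic>
#include <cstddef>

struct FreeChunk {
  std::atomic<std::size_t> size{0};
  std::atomic<const void*> map{nullptr};
};

// Writer side: publish the size first, then release-store the map. Any
// reader that observes the map is guaranteed to observe the size as well.
void PublishFreeChunk(FreeChunk* chunk, std::size_t bytes,
                      const void* free_space_map) {
  chunk->size.store(bytes, std::memory_order_relaxed);
  chunk->map.store(free_space_map, std::memory_order_release);
}

// Reader side (the store-buffer scan): acquire-load the map before trusting
// the size; an unpublished chunk is simply skipped.
std::size_t ReadPublishedSize(const FreeChunk* chunk) {
  if (chunk->map.load(std::memory_order_acquire) == nullptr) return 0;
  return chunk->size.load(std::memory_order_relaxed);
}

The acquire side of this pairing is what the later store-buffer.cc hunk adds when FindPointersToNewSpaceOnPage loads candidate map words with Acquire_Load.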
@@ -2566,7 +2532,8 @@ void PagedSpace::PrepareForMarkCompact() { intptr_t PagedSpace::SizeOfObjects() { - ASSERT(!heap()->IsSweepingComplete() || (unswept_free_bytes_ == 0)); + ASSERT(heap()->mark_compact_collector()->IsConcurrentSweepingInProgress() || + (unswept_free_bytes_ == 0)); return Size() - unswept_free_bytes_ - (limit() - top()); } @@ -2580,39 +2547,6 @@ void PagedSpace::RepairFreeListsAfterBoot() { } -bool PagedSpace::AdvanceSweeper(intptr_t bytes_to_sweep) { - if (IsLazySweepingComplete()) return true; - - intptr_t freed_bytes = 0; - Page* p = first_unswept_page_; - do { - Page* next_page = p->next_page(); - if (ShouldBeSweptLazily(p)) { - if (FLAG_gc_verbose) { - PrintF("Sweeping 0x%" V8PRIxPTR " lazily advanced.\n", - reinterpret_cast<intptr_t>(p)); - } - DecreaseUnsweptFreeBytes(p); - freed_bytes += - MarkCompactCollector:: - SweepConservatively<MarkCompactCollector::SWEEP_SEQUENTIALLY>( - this, NULL, p); - } - p = next_page; - } while (p != anchor() && freed_bytes < bytes_to_sweep); - - if (p == anchor()) { - first_unswept_page_ = Page::FromAddress(NULL); - } else { - first_unswept_page_ = p; - } - - heap()->FreeQueuedChunks(); - - return IsLazySweepingComplete(); -} - - void PagedSpace::EvictEvacuationCandidatesFromFreeLists() { if (allocation_info_.top() >= allocation_info_.limit()) return; @@ -2629,35 +2563,13 @@ void PagedSpace::EvictEvacuationCandidatesFromFreeLists() { } -bool PagedSpace::EnsureSweeperProgress(intptr_t size_in_bytes) { - MarkCompactCollector* collector = heap()->mark_compact_collector(); - if (collector->AreSweeperThreadsActivated()) { - if (collector->IsConcurrentSweepingInProgress()) { - if (collector->RefillFreeLists(this) < size_in_bytes) { - if (!collector->sequential_sweeping()) { - collector->WaitUntilSweepingCompleted(); - return true; - } - } - return false; - } - return true; - } else { - return AdvanceSweeper(size_in_bytes); - } -} - - HeapObject* PagedSpace::SlowAllocateRaw(int size_in_bytes) { // Allocation in this space has failed. - // If there are unswept pages advance lazy sweeper a bounded number of times - // until we find a size_in_bytes contiguous piece of memory - const int kMaxSweepingTries = 5; - bool sweeping_complete = false; - - for (int i = 0; i < kMaxSweepingTries && !sweeping_complete; i++) { - sweeping_complete = EnsureSweeperProgress(size_in_bytes); + // If sweeper threads are active, try to re-fill the free-lists. + MarkCompactCollector* collector = heap()->mark_compact_collector(); + if (collector->IsConcurrentSweepingInProgress()) { + collector->RefillFreeList(this); // Retry the free list allocation. HeapObject* object = free_list_.Allocate(size_in_bytes); @@ -2678,12 +2590,12 @@ HeapObject* PagedSpace::SlowAllocateRaw(int size_in_bytes) { return free_list_.Allocate(size_in_bytes); } - // Last ditch, sweep all the remaining pages to try to find space. This may - // cause a pause. - if (!IsLazySweepingComplete()) { - EnsureSweeperProgress(kMaxInt); + // If sweeper threads are active, wait for them at that point. + if (collector->IsConcurrentSweepingInProgress()) { + collector->WaitUntilSweepingCompleted(); - // Retry the free list allocation. + // After waiting for the sweeper threads, there may be new free-list + // entries. 
HeapObject* object = free_list_.Allocate(size_in_bytes); if (object != NULL) return object; } @@ -2932,22 +2844,22 @@ void LargeObjectSpace::TearDown() { } -MaybeObject* LargeObjectSpace::AllocateRaw(int object_size, - Executability executable) { +AllocationResult LargeObjectSpace::AllocateRaw(int object_size, + Executability executable) { // Check if we want to force a GC before growing the old space further. // If so, fail the allocation. if (!heap()->always_allocate() && heap()->OldGenerationAllocationLimitReached()) { - return Failure::RetryAfterGC(identity()); + return AllocationResult::Retry(identity()); } if (Size() + object_size > max_capacity_) { - return Failure::RetryAfterGC(identity()); + return AllocationResult::Retry(identity()); } LargePage* page = heap()->isolate()->memory_allocator()-> AllocateLargePage(object_size, this, executable); - if (page == NULL) return Failure::RetryAfterGC(identity()); + if (page == NULL) return AllocationResult::Retry(identity()); ASSERT(page->area_size() >= object_size); size_ += static_cast<int>(page->size()); @@ -3000,12 +2912,12 @@ size_t LargeObjectSpace::CommittedPhysicalMemory() { // GC support -MaybeObject* LargeObjectSpace::FindObject(Address a) { +Object* LargeObjectSpace::FindObject(Address a) { LargePage* page = FindPage(a); if (page != NULL) { return page->GetObject(); } - return Failure::Exception(); + return Smi::FromInt(0); // Signaling not found. } @@ -3086,7 +2998,7 @@ bool LargeObjectSpace::Contains(HeapObject* object) { bool owned = (chunk->owner() == this); - SLOW_ASSERT(!owned || !FindObject(address)->IsFailure()); + SLOW_ASSERT(!owned || FindObject(address)->IsHeapObject()); return owned; } @@ -3119,7 +3031,7 @@ void LargeObjectSpace::Verify() { object->IsFixedDoubleArray() || object->IsByteArray()); // The object itself should look OK. - object->Verify(); + object->ObjectVerify(); // Byte arrays and strings don't have interior pointers. if (object->IsCode()) { diff --git a/deps/v8/src/spaces.h b/deps/v8/src/spaces.h index 908e72382..735f1fbbf 100644 --- a/deps/v8/src/spaces.h +++ b/deps/v8/src/spaces.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SPACES_H_ #define V8_SPACES_H_ @@ -33,7 +10,7 @@ #include "list.h" #include "log.h" #include "platform/mutex.h" -#include "v8utils.h" +#include "utils.h" namespace v8 { namespace internal { @@ -946,7 +923,7 @@ class CodeRange { // Reserves a range of virtual memory, but does not commit any of it. // Can only be called once, at heap initialization time. // Returns false on failure. - bool SetUp(const size_t requested_size); + bool SetUp(size_t requested_size); // Frees the range of virtual memory, and frees the data structures used to // manage it. @@ -1520,9 +1497,8 @@ class FreeListNode: public HeapObject { inline void Zap(); - static inline FreeListNode* cast(MaybeObject* maybe) { - ASSERT(!maybe->IsFailure()); - return reinterpret_cast<FreeListNode*>(maybe); + static inline FreeListNode* cast(Object* object) { + return reinterpret_cast<FreeListNode*>(object); } private: @@ -1693,6 +1669,47 @@ class FreeList { }; +class AllocationResult { + public: + // Implicit constructor from Object*. + AllocationResult(Object* object) : object_(object), // NOLINT + retry_space_(INVALID_SPACE) { } + + AllocationResult() : object_(NULL), + retry_space_(INVALID_SPACE) { } + + static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) { + return AllocationResult(space); + } + + inline bool IsRetry() { return retry_space_ != INVALID_SPACE; } + + template <typename T> + bool To(T** obj) { + if (IsRetry()) return false; + *obj = T::cast(object_); + return true; + } + + Object* ToObjectChecked() { + CHECK(!IsRetry()); + return object_; + } + + AllocationSpace RetrySpace() { + ASSERT(IsRetry()); + return retry_space_; + } + + private: + explicit AllocationResult(AllocationSpace space) : object_(NULL), + retry_space_(space) { } + + Object* object_; + AllocationSpace retry_space_; +}; + + class PagedSpace : public Space { public: // Creates a space with a maximum capacity, and an id. @@ -1722,10 +1739,10 @@ class PagedSpace : public Space { bool Contains(HeapObject* o) { return Contains(o->address()); } // Given an address occupied by a live object, return that object if it is - // in this space, or Failure::Exception() if it is not. The implementation - // iterates over objects in the page containing the address, the cost is - // linear in the number of objects in the page. It may be slow. - MUST_USE_RESULT MaybeObject* FindObject(Address addr); + // in this space, or a Smi if it is not. The implementation iterates over + // objects in the page containing the address, the cost is linear in the + // number of objects in the page. It may be slow. + Object* FindObject(Address addr); // During boot the free_space_map is created, and afterwards we may need // to write it into the free list nodes that were already created. 
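The AllocationResult class introduced above replaces MaybeObject* as the return type of the raw allocation entry points in this header. A short usage sketch, assuming a PagedSpace* named space; the two helper calls are placeholders rather than upstream functions:

AllocationResult allocation = space->AllocateRaw(size_in_bytes);
HeapObject* object = NULL;
if (allocation.To(&object)) {
  // Success: |object| points at the freshly reserved (uninitialized) memory.
  InitializeObject(object);                             // placeholder
} else {
  // Failure: the result records which space to retry in after a GC.
  AllocationSpace retry_space = allocation.RetrySpace();
  CollectGarbageAndRetry(retry_space, size_in_bytes);   // placeholder
}

Unlike a raw MaybeObject*, a retry can no longer be mistaken for a live object: To() refuses to produce one, and ToObjectChecked() CHECKs that no retry is pending.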
@@ -1783,8 +1800,9 @@ class PagedSpace : public Space { intptr_t Available() { return free_list_.available(); } // Allocated bytes in this space. Garbage bytes that were not found due to - // lazy sweeping are counted as being allocated! The bytes in the current - // linear allocation area (between top and limit) are also counted here. + // concurrent sweeping are counted as being allocated! The bytes in the + // current linear allocation area (between top and limit) are also counted + // here. virtual intptr_t Size() { return accounting_stats_.Size(); } // As size, but the bytes in lazily swept pages are estimated and the bytes @@ -1812,7 +1830,7 @@ class PagedSpace : public Space { // Allocate the requested number of bytes in the space if possible, return a // failure object if not. - MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes); + MUST_USE_RESULT inline AllocationResult AllocateRaw(int size_in_bytes); // Give a block of memory to the space's free list. It might be added to // the free list or accounted as waste. @@ -1853,7 +1871,7 @@ class PagedSpace : public Space { void IncreaseCapacity(int size); // Releases an unused page and shrinks the space. - void ReleasePage(Page* page, bool unlink); + void ReleasePage(Page* page); // The dummy page that anchors the linked list of pages. Page* anchor() { return &anchor_; } @@ -1885,24 +1903,18 @@ class PagedSpace : public Space { // Evacuation candidates are swept by evacuator. Needs to return a valid // result before _and_ after evacuation has finished. - static bool ShouldBeSweptLazily(Page* p) { + static bool ShouldBeSweptBySweeperThreads(Page* p) { return !p->IsEvacuationCandidate() && !p->IsFlagSet(Page::RESCAN_ON_EVACUATION) && !p->WasSweptPrecisely(); } - void SetPagesToSweep(Page* first) { - ASSERT(unswept_free_bytes_ == 0); - if (first == &anchor_) first = NULL; - first_unswept_page_ = first; - } - void IncrementUnsweptFreeBytes(intptr_t by) { unswept_free_bytes_ += by; } void IncreaseUnsweptFreeBytes(Page* p) { - ASSERT(ShouldBeSweptLazily(p)); + ASSERT(ShouldBeSweptBySweeperThreads(p)); unswept_free_bytes_ += (p->area_size() - p->LiveBytes()); } @@ -1911,7 +1923,7 @@ class PagedSpace : public Space { } void DecreaseUnsweptFreeBytes(Page* p) { - ASSERT(ShouldBeSweptLazily(p)); + ASSERT(ShouldBeSweptBySweeperThreads(p)); unswept_free_bytes_ -= (p->area_size() - p->LiveBytes()); } @@ -1919,15 +1931,18 @@ class PagedSpace : public Space { unswept_free_bytes_ = 0; } - bool AdvanceSweeper(intptr_t bytes_to_sweep); - - // When parallel sweeper threads are active and the main thread finished - // its sweeping phase, this function waits for them to complete, otherwise - // AdvanceSweeper with size_in_bytes is called. + // This function tries to steal size_in_bytes memory from the sweeper threads + // free-lists. If it does not succeed stealing enough memory, it will wait + // for the sweeper threads to finish sweeping. + // It returns true when sweeping is completed and false otherwise. bool EnsureSweeperProgress(intptr_t size_in_bytes); - bool IsLazySweepingComplete() { - return !first_unswept_page_->is_valid(); + void set_end_of_unswept_pages(Page* page) { + end_of_unswept_pages_ = page; + } + + Page* end_of_unswept_pages() { + return end_of_unswept_pages_; } Page* FirstPage() { return anchor_.next_page(); } @@ -1969,15 +1984,16 @@ class PagedSpace : public Space { bool was_swept_conservatively_; - // The first page to be swept when the lazy sweeper advances. Is set - // to NULL when all pages have been swept. 
- Page* first_unswept_page_; - // The number of free bytes which could be reclaimed by advancing the - // lazy sweeper. This is only an estimation because lazy sweeping is - // done conservatively. + // concurrent sweeper threads. This is only an estimation because concurrent + // sweeping is done conservatively. intptr_t unswept_free_bytes_; + // The sweeper threads iterate over the list of pointer and data space pages + // and sweep these pages concurrently. They will stop sweeping after the + // end_of_unswept_pages_ page. + Page* end_of_unswept_pages_; + // Expands the space by allocating a fixed number of pages. Returns false if // it cannot allocate requested number of pages from OS, or if the hard heap // size limit has been hit. @@ -2540,7 +2556,7 @@ class NewSpace : public Space { return allocation_info_.limit_address(); } - MUST_USE_RESULT INLINE(MaybeObject* AllocateRaw(int size_in_bytes)); + MUST_USE_RESULT INLINE(AllocationResult AllocateRaw(int size_in_bytes)); // Reset the allocation pointer to the beginning of the active semispace. void ResetAllocationInfo(); @@ -2657,7 +2673,7 @@ class NewSpace : public Space { HistogramInfo* allocated_histogram_; HistogramInfo* promoted_histogram_; - MUST_USE_RESULT MaybeObject* SlowAllocateRaw(int size_in_bytes); + MUST_USE_RESULT AllocationResult SlowAllocateRaw(int size_in_bytes); friend class SemiSpaceIterator; @@ -2812,8 +2828,8 @@ class LargeObjectSpace : public Space { // Shared implementation of AllocateRaw, AllocateRawCode and // AllocateRawFixedArray. - MUST_USE_RESULT MaybeObject* AllocateRaw(int object_size, - Executability executable); + MUST_USE_RESULT AllocationResult AllocateRaw(int object_size, + Executability executable); // Available bytes for objects in this space. inline intptr_t Available(); @@ -2841,10 +2857,9 @@ class LargeObjectSpace : public Space { return page_count_; } - // Finds an object for a given address, returns Failure::Exception() - // if it is not found. The function iterates through all objects in this - // space, may be slow. - MaybeObject* FindObject(Address a); + // Finds an object for a given address, returns a Smi if it is not found. + // The function iterates through all objects in this space, may be slow. + Object* FindObject(Address a); // Finds a large object page containing the given address, returns NULL // if such a page doesn't exist. @@ -2872,7 +2887,7 @@ class LargeObjectSpace : public Space { #endif // Checks whether an address is in the object area in this space. It // iterates all objects in the space. May be slow. - bool SlowContains(Address addr) { return !FindObject(addr)->IsFailure(); } + bool SlowContains(Address addr) { return FindObject(addr)->IsHeapObject(); } private: intptr_t max_capacity_; diff --git a/deps/v8/src/splay-tree-inl.h b/deps/v8/src/splay-tree-inl.h index 42024756e..fe40f276c 100644 --- a/deps/v8/src/splay-tree-inl.h +++ b/deps/v8/src/splay-tree-inl.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SPLAY_TREE_INL_H_ #define V8_SPLAY_TREE_INL_H_ diff --git a/deps/v8/src/splay-tree.h b/deps/v8/src/splay-tree.h index f393027a8..77f05b010 100644 --- a/deps/v8/src/splay-tree.h +++ b/deps/v8/src/splay-tree.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_SPLAY_TREE_H_ #define V8_SPLAY_TREE_H_ diff --git a/deps/v8/src/store-buffer-inl.h b/deps/v8/src/store-buffer-inl.h index 7e5432c84..709415e45 100644 --- a/deps/v8/src/store-buffer-inl.h +++ b/deps/v8/src/store-buffer-inl.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_STORE_BUFFER_INL_H_ #define V8_STORE_BUFFER_INL_H_ diff --git a/deps/v8/src/store-buffer.cc b/deps/v8/src/store-buffer.cc index a1479b2b9..b0f7d2f80 100644 --- a/deps/v8/src/store-buffer.cc +++ b/deps/v8/src/store-buffer.cc @@ -1,37 +1,14 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "store-buffer.h" #include <algorithm> #include "v8.h" +#include "counters.h" #include "store-buffer-inl.h" -#include "v8-counters.h" namespace v8 { namespace internal { @@ -388,7 +365,9 @@ void StoreBuffer::VerifyPointers(LargeObjectSpace* space) { // When we are not in GC the Heap::InNewSpace() predicate // checks that pointers which satisfy predicate point into // the active semispace. - heap_->InNewSpace(*slot); + Object* object = reinterpret_cast<Object*>( + NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); + heap_->InNewSpace(object); slot_address += kPointerSize; } } @@ -427,14 +406,18 @@ void StoreBuffer::FindPointersToNewSpaceInRegion( slot_address < end; slot_address += kPointerSize) { Object** slot = reinterpret_cast<Object**>(slot_address); - if (heap_->InNewSpace(*slot)) { - HeapObject* object = reinterpret_cast<HeapObject*>(*slot); - ASSERT(object->IsHeapObject()); + Object* object = reinterpret_cast<Object*>( + NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); + if (heap_->InNewSpace(object)) { + HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); + ASSERT(heap_object->IsHeapObject()); // The new space object was not promoted if it still contains a map // pointer. Clear the map field now lazily. - if (clear_maps) ClearDeadObject(object); - slot_callback(reinterpret_cast<HeapObject**>(slot), object); - if (heap_->InNewSpace(*slot)) { + if (clear_maps) ClearDeadObject(heap_object); + slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); + object = reinterpret_cast<Object*>( + NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); + if (heap_->InNewSpace(object)) { EnterDirectlyIntoStoreBuffer(slot_address); } } @@ -502,9 +485,9 @@ void StoreBuffer::FindPointersToNewSpaceInMapsRegion( // This function iterates over all the pointers in a paged space in the heap, // looking for pointers into new space. Within the pages there may be dead // objects that have not been overwritten by free spaces or fillers because of -// lazy sweeping. These dead objects may not contain pointers to new space. -// The garbage areas that have been swept properly (these will normally be the -// large ones) will be marked with free space and filler map words. In +// concurrent sweeping. These dead objects may not contain pointers to new +// space. The garbage areas that have been swept properly (these will normally +// be the large ones) will be marked with free space and filler map words. In // addition any area that has never been used at all for object allocation must // be marked with a free space or filler. Because the free space and filler // maps do not move we can always recognize these even after a compaction. @@ -531,7 +514,11 @@ void StoreBuffer::FindPointersToNewSpaceOnPage( Object* constant_pool_array_map = heap_->constant_pool_array_map(); while (visitable_end < end_of_page) { - Object* o = *reinterpret_cast<Object**>(visitable_end); + // The sweeper thread concurrently may write free space maps and size to + // this page. We need acquire load here to make sure that we get a + // consistent view of maps and their sizes. 
+ Object* o = reinterpret_cast<Object*>( + Acquire_Load(reinterpret_cast<AtomicWord*>(visitable_end))); // Skip fillers or constant pool arrays (which never contain new-space // pointers but can contain pointers which can be confused for fillers) // but not things that look like fillers in the special garbage section @@ -595,14 +582,17 @@ void StoreBuffer::IteratePointersInStoreBuffer( Address* saved_top = old_top_; #endif Object** slot = reinterpret_cast<Object**>(*current); - Object* object = *slot; + Object* object = reinterpret_cast<Object*>( + NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); if (heap_->InFromSpace(object)) { HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); // The new space object was not promoted if it still contains a map // pointer. Clear the map field now lazily. if (clear_maps) ClearDeadObject(heap_object); slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); - if (heap_->InNewSpace(*slot)) { + object = reinterpret_cast<Object*>( + NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); + if (heap_->InNewSpace(object)) { EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); } } diff --git a/deps/v8/src/store-buffer.h b/deps/v8/src/store-buffer.h index 01e7cbeb8..bf3a3f737 100644 --- a/deps/v8/src/store-buffer.h +++ b/deps/v8/src/store-buffer.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_STORE_BUFFER_H_ #define V8_STORE_BUFFER_H_ diff --git a/deps/v8/src/string-search.cc b/deps/v8/src/string-search.cc index 3ae68b5d4..38dacc9cc 100644 --- a/deps/v8/src/string-search.cc +++ b/deps/v8/src/string-search.cc @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" #include "string-search.h" diff --git a/deps/v8/src/string-search.h b/deps/v8/src/string-search.h index bc685ffe5..09bc36ef8 100644 --- a/deps/v8/src/string-search.h +++ b/deps/v8/src/string-search.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_STRING_SEARCH_H_ #define V8_STRING_SEARCH_H_ diff --git a/deps/v8/src/string-stream.cc b/deps/v8/src/string-stream.cc index e2d15f540..25e340b4a 100644 --- a/deps/v8/src/string-stream.cc +++ b/deps/v8/src/string-stream.cc @@ -1,35 +1,11 @@ -// Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "v8.h" - -#include "factory.h" +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + #include "string-stream.h" +#include "handles-inl.h" + namespace v8 { namespace internal { @@ -290,7 +266,7 @@ void StringStream::OutputToFile(FILE* out) { Handle<String> StringStream::ToString(Isolate* isolate) { return isolate->factory()->NewStringFromUtf8( - Vector<const char>(buffer_, length_)); + Vector<const char>(buffer_, length_)).ToHandleChecked(); } diff --git a/deps/v8/src/string-stream.h b/deps/v8/src/string-stream.h index e3db2a8a8..ecc0e80f5 100644 --- a/deps/v8/src/string-stream.h +++ b/deps/v8/src/string-stream.h @@ -1,40 +1,18 @@ -// Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_STRING_STREAM_H_ #define V8_STRING_STREAM_H_ +#include "handles.h" + namespace v8 { namespace internal { - class StringAllocator { public: - virtual ~StringAllocator() {} + virtual ~StringAllocator() { } // Allocate a number of bytes. virtual char* allocate(unsigned bytes) = 0; // Allocate a larger number of bytes and copy the old buffer to the new one. @@ -46,11 +24,12 @@ class StringAllocator { // Normal allocator uses new[] and delete[]. -class HeapStringAllocator: public StringAllocator { +class HeapStringAllocator V8_FINAL : public StringAllocator { public: ~HeapStringAllocator() { DeleteArray(space_); } - char* allocate(unsigned bytes); - char* grow(unsigned* bytes); + virtual char* allocate(unsigned bytes) V8_OVERRIDE; + virtual char* grow(unsigned* bytes) V8_OVERRIDE; + private: char* space_; }; @@ -59,18 +38,19 @@ class HeapStringAllocator: public StringAllocator { // Allocator for use when no new c++ heap allocation is allowed. // Given a preallocated buffer up front and does no allocation while // building message. -class NoAllocationStringAllocator: public StringAllocator { +class NoAllocationStringAllocator V8_FINAL : public StringAllocator { public: NoAllocationStringAllocator(char* memory, unsigned size); - char* allocate(unsigned bytes) { return space_; } - char* grow(unsigned* bytes); + virtual char* allocate(unsigned bytes) V8_OVERRIDE { return space_; } + virtual char* grow(unsigned* bytes) V8_OVERRIDE; + private: unsigned size_; char* space_; }; -class FmtElm { +class FmtElm V8_FINAL { public: FmtElm(int value) : type_(INT) { // NOLINT data_.u_int_ = value; @@ -110,7 +90,7 @@ class FmtElm { }; -class StringStream { +class StringStream V8_FINAL { public: explicit StringStream(StringAllocator* allocator): allocator_(allocator), @@ -120,9 +100,6 @@ class StringStream { buffer_[0] = 0; } - ~StringStream() { - } - bool Put(char c); bool Put(String* str); bool Put(String* str, int start, int end); @@ -175,7 +152,6 @@ class StringStream { static bool IsMentionedObjectCacheClear(Isolate* isolate); #endif - static const int kInitialCapacity = 16; private: @@ -194,7 +170,7 @@ class StringStream { // Utility class to print a list of items to a stream, divided by a separator. 
-class SimpleListPrinter { +class SimpleListPrinter V8_FINAL { public: explicit SimpleListPrinter(StringStream* stream, char separator = ',') { separator_ = separator; @@ -217,7 +193,6 @@ class SimpleListPrinter { StringStream* stream_; }; - } } // namespace v8::internal #endif // V8_STRING_STREAM_H_ diff --git a/deps/v8/src/string.js b/deps/v8/src/string.js index 74230c986..9c9042783 100644 --- a/deps/v8/src/string.js +++ b/deps/v8/src/string.js @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file relies on the fact that the following declaration has been made // in runtime.js: @@ -173,7 +150,6 @@ function StringMatch(regexp) { var lastIndex = regexp.lastIndex; TO_INTEGER_FOR_SIDE_EFFECT(lastIndex); if (!regexp.global) return RegExpExecNoTests(regexp, subject, 0); - %_Log('regexp', 'regexp-match,%0S,%1r', [subject, regexp]); // lastMatchInfo is defined in regexp.js. var result = %StringMatch(subject, regexp, lastMatchInfo); if (result !== null) lastMatchInfoOverride = null; @@ -244,7 +220,6 @@ function StringReplace(search, replace) { // value is discarded. 
var lastIndex = search.lastIndex; TO_INTEGER_FOR_SIDE_EFFECT(lastIndex); - %_Log('regexp', 'regexp-replace,%0r,%1S', [search, subject]); if (!IS_SPEC_FUNCTION(replace)) { replace = TO_STRING_INLINE(replace); @@ -646,8 +621,6 @@ function StringSplit(separator, limit) { var ArrayPushBuiltin = $Array.prototype.push; function StringSplitOnRegExp(subject, separator, limit, length) { - %_Log('regexp', 'regexp-split,%0S,%1r', [subject, separator]); - if (length === 0) { if (DoRegExpExec(separator, subject, 0, 0) != null) { return []; @@ -658,7 +631,7 @@ function StringSplitOnRegExp(subject, separator, limit, length) { var currentIndex = 0; var startIndex = 0; var startMatch = 0; - var result = []; + var result = new InternalArray(); outer_loop: while (true) { @@ -703,7 +676,9 @@ function StringSplitOnRegExp(subject, separator, limit, length) { startIndex = currentIndex = endIndex; } - return result; + var array_result = []; + %MoveArrayContents(result, array_result); + return array_result; } diff --git a/deps/v8/src/strtod.cc b/deps/v8/src/strtod.cc index d332fd2bc..aac74199a 100644 --- a/deps/v8/src/strtod.cc +++ b/deps/v8/src/strtod.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdarg.h> #include <cmath> diff --git a/deps/v8/src/strtod.h b/deps/v8/src/strtod.h index 1a5a96c8e..f4ce731a1 100644 --- a/deps/v8/src/strtod.h +++ b/deps/v8/src/strtod.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_STRTOD_H_ #define V8_STRTOD_H_ diff --git a/deps/v8/src/stub-cache.cc b/deps/v8/src/stub-cache.cc index ff641dddf..6bf209bc0 100644 --- a/deps/v8/src/stub-cache.cc +++ b/deps/v8/src/stub-cache.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -335,7 +312,7 @@ Handle<Code> StubCache::ComputeCompareNil(Handle<Map> receiver_map, Code::FindAndReplacePattern pattern; pattern.Add(isolate_->factory()->meta_map(), receiver_map); - Handle<Code> ic = stub.GetCodeCopy(isolate_, pattern); + Handle<Code> ic = stub.GetCodeCopy(pattern); if (!receiver_map->is_shared()) { Map::UpdateCodeCache(receiver_map, name, ic); @@ -478,7 +455,7 @@ void StubCache::CollectMatchingMaps(SmallMapList* types, // StubCompiler implementation. -RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty) { +RUNTIME_FUNCTION(StoreCallbackProperty) { JSObject* receiver = JSObject::cast(args[0]); JSObject* holder = JSObject::cast(args[1]); ExecutableAccessorInfo* callback = ExecutableAccessorInfo::cast(args[2]); @@ -499,7 +476,7 @@ RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty) { PropertyCallbackArguments custom_args(isolate, callback->data(), receiver, holder); custom_args.Call(fun, v8::Utils::ToLocal(str), v8::Utils::ToLocal(value)); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); return *value; } @@ -511,7 +488,7 @@ RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty) { * Returns |Heap::no_interceptor_result_sentinel()| if interceptor doesn't * provide any value for the given name. */ -RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly) { +RUNTIME_FUNCTION(LoadPropertyWithInterceptorOnly) { ASSERT(args.length() == StubCache::kInterceptorArgsLength); Handle<Name> name_handle = args.at<Name>(StubCache::kInterceptorArgsNameIndex); @@ -539,7 +516,7 @@ RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly) { HandleScope scope(isolate); v8::Handle<v8::Value> r = callback_args.Call(getter, v8::Utils::ToLocal(name)); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); + RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate); if (!r.IsEmpty()) { Handle<Object> result = v8::Utils::OpenHandle(*r); result->VerifyApiCallResultType(); @@ -551,7 +528,7 @@ RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly) { } -static MaybeObject* ThrowReferenceError(Isolate* isolate, Name* name) { +static Object* ThrowReferenceError(Isolate* isolate, Name* name) { // If the load is non-contextual, just return the undefined result. // Note that both keyed and non-keyed loads may end up here. HandleScope scope(isolate); @@ -569,8 +546,9 @@ static MaybeObject* ThrowReferenceError(Isolate* isolate, Name* name) { } -static Handle<Object> LoadWithInterceptor(Arguments* args, - PropertyAttributes* attrs) { +MUST_USE_RESULT static MaybeHandle<Object> LoadWithInterceptor( + Arguments* args, + PropertyAttributes* attrs) { ASSERT(args->length() == StubCache::kInterceptorArgsLength); Handle<Name> name_handle = args->at<Name>(StubCache::kInterceptorArgsNameIndex); @@ -604,7 +582,7 @@ static Handle<Object> LoadWithInterceptor(Arguments* args, // Use the interceptor getter. 
v8::Handle<v8::Value> r = callback_args.Call(getter, v8::Utils::ToLocal(name)); - RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); + RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); if (!r.IsEmpty()) { *attrs = NONE; Handle<Object> result = v8::Utils::OpenHandle(*r); @@ -613,9 +591,8 @@ static Handle<Object> LoadWithInterceptor(Arguments* args, } } - Handle<Object> result = JSObject::GetPropertyPostInterceptor( + return JSObject::GetPropertyPostInterceptor( holder_handle, receiver_handle, name_handle, attrs); - return result; } @@ -623,11 +600,12 @@ static Handle<Object> LoadWithInterceptor(Arguments* args, * Loads a property with an interceptor performing post interceptor * lookup if interceptor failed. */ -RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForLoad) { +RUNTIME_FUNCTION(LoadPropertyWithInterceptorForLoad) { PropertyAttributes attr = NONE; HandleScope scope(isolate); - Handle<Object> result = LoadWithInterceptor(&args, &attr); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, LoadWithInterceptor(&args, &attr)); // If the property is present, return it. if (attr != ABSENT) return *result; @@ -635,11 +613,12 @@ RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForLoad) { } -RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForCall) { +RUNTIME_FUNCTION(LoadPropertyWithInterceptorForCall) { PropertyAttributes attr; HandleScope scope(isolate); - Handle<Object> result = LoadWithInterceptor(&args, &attr); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, LoadWithInterceptor(&args, &attr)); // This is call IC. In this case, we simply return the undefined result which // will lead to an exception when trying to invoke the result as a // function. 
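The interceptor hunks above move from returning a possibly-empty Handle<Object> checked with RETURN_IF_EMPTY_HANDLE to returning MaybeHandle<Object> and unpacking it with ASSIGN_RETURN_FAILURE_ON_EXCEPTION. A sketch of the shape of that pattern at a call site; the wrapper function is hypothetical, and the concrete failure value returned while an exception is pending is left to the upstream macro:

// Hypothetical wrapper: returns false when the interceptor left an exception
// pending on the isolate, in which case |*result| must not be used and the
// runtime function should return the macro-supplied failure sentinel.
static bool TryLoadWithInterceptor(Arguments* args,
                                   PropertyAttributes* attrs,
                                   Handle<Object>* result) {
  MaybeHandle<Object> maybe_result = LoadWithInterceptor(args, attrs);
  return maybe_result.ToHandle(result);
}

The checked accessor of the same type appears directly in this diff as well: string-stream.cc now unwraps NewStringFromUtf8(...) with ToHandleChecked(), because failure is not expected at that call site.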
@@ -647,7 +626,7 @@ RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForCall) { } -RUNTIME_FUNCTION(MaybeObject*, StoreInterceptorProperty) { +RUNTIME_FUNCTION(StoreInterceptorProperty) { HandleScope scope(isolate); ASSERT(args.length() == 3); StoreIC ic(IC::NO_EXTRA_FRAME, isolate); @@ -656,21 +635,24 @@ RUNTIME_FUNCTION(MaybeObject*, StoreInterceptorProperty) { Handle<Object> value = args.at<Object>(2); ASSERT(receiver->HasNamedInterceptor()); PropertyAttributes attr = NONE; - Handle<Object> result = JSObject::SetPropertyWithInterceptor( - receiver, name, value, attr, ic.strict_mode()); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSObject::SetPropertyWithInterceptor( + receiver, name, value, attr, ic.strict_mode())); return *result; } -RUNTIME_FUNCTION(MaybeObject*, KeyedLoadPropertyWithInterceptor) { +RUNTIME_FUNCTION(KeyedLoadPropertyWithInterceptor) { HandleScope scope(isolate); Handle<JSObject> receiver = args.at<JSObject>(0); ASSERT(args.smi_at(1) >= 0); uint32_t index = args.smi_at(1); - Handle<Object> result = - JSObject::GetElementWithInterceptor(receiver, receiver, index); - RETURN_IF_EMPTY_HANDLE(isolate, result); + Handle<Object> result; + ASSIGN_RETURN_FAILURE_ON_EXCEPTION( + isolate, result, + JSObject::GetElementWithInterceptor(receiver, receiver, index)); return *result; } @@ -777,10 +759,10 @@ Handle<Code> StubCompiler::GetCodeWithFlags(Code::Flags flags, void StubCompiler::LookupPostInterceptor(Handle<JSObject> holder, Handle<Name> name, LookupResult* lookup) { - holder->LocalLookupRealNamedProperty(*name, lookup); + holder->LocalLookupRealNamedProperty(name, lookup); if (lookup->IsFound()) return; if (holder->GetPrototype()->IsNull()) return; - holder->GetPrototype()->Lookup(*name, lookup); + holder->GetPrototype()->Lookup(name, lookup); } @@ -802,9 +784,6 @@ Register LoadStubCompiler::HandlerFrontendHeader( } else if (type->Is(HeapType::Number())) { function_index = Context::NUMBER_FUNCTION_INDEX; } else if (type->Is(HeapType::Boolean())) { - // Booleans use the generic oddball map, so an additional check is needed to - // ensure the receiver is really a boolean. - GenerateBooleanCheck(object_reg, miss); function_index = Context::BOOLEAN_FUNCTION_INDEX; } else { check_type = SKIP_RECEIVER; @@ -886,7 +865,7 @@ void LoadStubCompiler::NonexistentHandlerFrontend(Handle<HeapType> type, name = factory()->InternalizeString(Handle<String>::cast(name)); } ASSERT(last.is_null() || - last->property_dictionary()->FindEntry(*name) == + last->property_dictionary()->FindEntry(name) == NameDictionary::kNotFound); GenerateDictionaryNegativeLookup(masm(), &miss, holder, name, scratch2(), scratch3()); @@ -896,7 +875,7 @@ void LoadStubCompiler::NonexistentHandlerFrontend(Handle<HeapType> type, // check that the global property cell is empty. if (last_map->IsJSGlobalObjectMap()) { Handle<JSGlobalObject> global = last.is_null() - ? Handle<JSGlobalObject>::cast(type->AsConstant()) + ? 
Handle<JSGlobalObject>::cast(type->AsConstant()->Value()) : Handle<JSGlobalObject>::cast(last); GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss); } @@ -1183,13 +1162,14 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement( receiver_map->has_external_array_elements() || receiver_map->has_fixed_typed_array_elements()) { Handle<Code> stub = KeyedLoadFastElementStub( + isolate(), receiver_map->instance_type() == JS_ARRAY_TYPE, - elements_kind).GetCode(isolate()); + elements_kind).GetCode(); __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK); } else { Handle<Code> stub = FLAG_compiled_keyed_dictionary_loads - ? KeyedLoadDictionaryElementStub().GetCode(isolate()) - : KeyedLoadDictionaryElementPlatformStub().GetCode(isolate()); + ? KeyedLoadDictionaryElementStub(isolate()).GetCode() + : KeyedLoadDictionaryElementPlatformStub(isolate()).GetCode(); __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK); } @@ -1209,13 +1189,15 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement( receiver_map->has_external_array_elements() || receiver_map->has_fixed_typed_array_elements()) { stub = KeyedStoreFastElementStub( + isolate(), is_jsarray, elements_kind, - store_mode()).GetCode(isolate()); + store_mode()).GetCode(); } else { - stub = KeyedStoreElementStub(is_jsarray, + stub = KeyedStoreElementStub(isolate(), + is_jsarray, elements_kind, - store_mode()).GetCode(isolate()); + store_mode()).GetCode(); } __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK); @@ -1272,6 +1254,7 @@ Handle<Code> BaseLoadStoreStubCompiler::GetICCode(Code::Kind kind, InlineCacheState state) { Code::Flags flags = Code::ComputeFlags(kind, state, extra_state(), type); Handle<Code> code = GetCodeWithFlags(flags, name); + IC::RegisterWeakMapDependency(code); PROFILE(isolate(), CodeCreateEvent(log_kind(code), *code, *name)); JitEvent(name, code); return code; @@ -1308,13 +1291,15 @@ void KeyedLoadStubCompiler::CompileElementHandlers(MapHandleList* receiver_maps, IsExternalArrayElementsKind(elements_kind) || IsFixedTypedArrayElementsKind(elements_kind)) { cached_stub = - KeyedLoadFastElementStub(is_js_array, - elements_kind).GetCode(isolate()); + KeyedLoadFastElementStub(isolate(), + is_js_array, + elements_kind).GetCode(); } else if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) { cached_stub = isolate()->builtins()->KeyedLoadIC_SloppyArguments(); } else { ASSERT(elements_kind == DICTIONARY_ELEMENTS); - cached_stub = KeyedLoadDictionaryElementStub().GetCode(isolate()); + cached_stub = + KeyedLoadDictionaryElementStub(isolate()).GetCode(); } } @@ -1343,10 +1328,11 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElementPolymorphic( ElementsKind elements_kind = receiver_map->elements_kind(); if (!transitioned_map.is_null()) { cached_stub = ElementsTransitionAndStoreStub( + isolate(), elements_kind, transitioned_map->elements_kind(), is_js_array, - store_mode()).GetCode(isolate()); + store_mode()).GetCode(); } else if (receiver_map->instance_type() < FIRST_JS_RECEIVER_TYPE) { cached_stub = isolate()->builtins()->KeyedStoreIC_Slow(); } else { @@ -1354,14 +1340,16 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElementPolymorphic( receiver_map->has_external_array_elements() || receiver_map->has_fixed_typed_array_elements()) { cached_stub = KeyedStoreFastElementStub( + isolate(), is_js_array, elements_kind, - store_mode()).GetCode(isolate()); + store_mode()).GetCode(); } else { cached_stub = KeyedStoreElementStub( + isolate(), is_js_array, 
elements_kind, - store_mode()).GetCode(isolate()); + store_mode()).GetCode(); } } ASSERT(!cached_stub.is_null()); diff --git a/deps/v8/src/stub-cache.h b/deps/v8/src/stub-cache.h index 7a304fe71..707df6c7b 100644 --- a/deps/v8/src/stub-cache.h +++ b/deps/v8/src/stub-cache.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_STUB_CACHE_H_ #define V8_STUB_CACHE_H_ @@ -288,15 +265,15 @@ class StubCache { // Support functions for IC stubs for callbacks. -DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty); +DECLARE_RUNTIME_FUNCTION(StoreCallbackProperty); // Support functions for IC stubs for interceptors. 
-DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForLoad); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForCall); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreInterceptorProperty); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadPropertyWithInterceptor); +DECLARE_RUNTIME_FUNCTION(LoadPropertyWithInterceptorOnly); +DECLARE_RUNTIME_FUNCTION(LoadPropertyWithInterceptorForLoad); +DECLARE_RUNTIME_FUNCTION(LoadPropertyWithInterceptorForCall); +DECLARE_RUNTIME_FUNCTION(StoreInterceptorProperty); +DECLARE_RUNTIME_FUNCTION(KeyedLoadPropertyWithInterceptor); enum PrototypeCheckType { CHECK_ALL_MAPS, SKIP_RECEIVER }; @@ -309,7 +286,7 @@ class StubCompiler BASE_EMBEDDED { explicit StubCompiler(Isolate* isolate, ExtraICState extra_ic_state = kNoExtraICState) : isolate_(isolate), extra_ic_state_(extra_ic_state), - masm_(isolate, NULL, 256), failure_(NULL) { } + masm_(isolate, NULL, 256) { } Handle<Code> CompileLoadInitialize(Code::Flags flags); Handle<Code> CompileLoadPreMonomorphic(Code::Flags flags); @@ -399,8 +376,6 @@ class StubCompiler BASE_EMBEDDED { Label* miss, PrototypeCheckType check = CHECK_ALL_MAPS); - void GenerateBooleanCheck(Register object, Label* miss); - static void GenerateFastApiCall(MacroAssembler* masm, const CallOptimization& optimization, Handle<Map> receiver_map, @@ -417,7 +392,6 @@ class StubCompiler BASE_EMBEDDED { ExtraICState extra_state() { return extra_ic_state_; } MacroAssembler* masm() { return &masm_; } - void set_failure(Failure* failure) { failure_ = failure; } static void LookupPostInterceptor(Handle<JSObject> holder, Handle<Name> name, @@ -433,7 +407,6 @@ class StubCompiler BASE_EMBEDDED { Isolate* isolate_; const ExtraICState extra_ic_state_; MacroAssembler masm_; - Failure* failure_; }; diff --git a/deps/v8/src/sweeper-thread.cc b/deps/v8/src/sweeper-thread.cc index 7e8305abe..e8c8cd68d 100644 --- a/deps/v8/src/sweeper-thread.cc +++ b/deps/v8/src/sweeper-thread.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "sweeper-thread.h" @@ -89,6 +66,15 @@ void SweeperThread::WaitForSweeperThread() { } +bool SweeperThread::SweepingCompleted() { + bool value = end_sweeping_semaphore_.WaitFor(TimeDelta::FromSeconds(0)); + if (value) { + end_sweeping_semaphore_.Signal(); + } + return value; +} + + int SweeperThread::NumberOfThreads(int max_available) { if (!FLAG_concurrent_sweeping && !FLAG_parallel_sweeping) return 0; if (FLAG_sweeper_threads > 0) return FLAG_sweeper_threads; diff --git a/deps/v8/src/sweeper-thread.h b/deps/v8/src/sweeper-thread.h index 3f7917b03..794e660aa 100644 --- a/deps/v8/src/sweeper-thread.h +++ b/deps/v8/src/sweeper-thread.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
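Aside: the SweeperThread::SweepingCompleted() added above polls the end-of-sweeping semaphore with a zero timeout and, if the token was there, signals it again so a later blocking WaitForSweeperThread() still succeeds. A small sketch of that idiom using standard C++20 primitives as stand-ins for V8's platform semaphore:

    #include <chrono>
    #include <cstdio>
    #include <semaphore>
    #include <thread>

    std::binary_semaphore end_sweeping(0);

    bool SweepingCompleted() {
      bool done = end_sweeping.try_acquire();  // WaitFor(TimeDelta::FromSeconds(0))
      if (done) end_sweeping.release();        // put the token back for the blocking wait
      return done;
    }

    int main() {
      std::thread sweeper([] {
        std::this_thread::sleep_for(std::chrono::milliseconds(50));
        end_sweeping.release();                // sweeping finished
      });
      std::printf("before: %d\n", SweepingCompleted());
      sweeper.join();
      std::printf("after:  %d\n", SweepingCompleted());
      std::printf("again:  %d\n", SweepingCompleted());  // token was re-signalled
    }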
#ifndef V8_SWEEPER_THREAD_H_ #define V8_SWEEPER_THREAD_H_ @@ -31,7 +8,7 @@ #include "atomicops.h" #include "flags.h" #include "platform.h" -#include "v8utils.h" +#include "utils.h" #include "spaces.h" @@ -49,6 +26,7 @@ class SweeperThread : public Thread { void Stop(); void StartSweeping(); void WaitForSweeperThread(); + bool SweepingCompleted(); static int NumberOfThreads(int max_available); diff --git a/deps/v8/src/symbol.js b/deps/v8/src/symbol.js index e7ea5a68d..1c4830202 100644 --- a/deps/v8/src/symbol.js +++ b/deps/v8/src/symbol.js @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
"use strict"; diff --git a/deps/v8/src/third_party/vtune/vtune-jit.cc b/deps/v8/src/third_party/vtune/vtune-jit.cc index ea897e58e..023dd1864 100644 --- a/deps/v8/src/third_party/vtune/vtune-jit.cc +++ b/deps/v8/src/third_party/vtune/vtune-jit.cc @@ -196,8 +196,9 @@ void VTUNEJITInterface::event_handler(const v8::JitCodeEvent* event) { if (*script != NULL) { // Get the source file name and set it to jmethod.source_file_name - if ((*script->GetScriptName())->IsString()) { - Handle<String> script_name = script->GetScriptName()->ToString(); + if ((*script->GetUnboundScript()->GetScriptName())->IsString()) { + Handle<String> script_name = + script->GetUnboundScript()->GetScriptName()->ToString(); temp_file_name = new char[script_name->Utf8Length() + 1]; script_name->WriteUtf8(temp_file_name); jmethod.source_file_name = temp_file_name; @@ -224,7 +225,7 @@ void VTUNEJITInterface::event_handler(const v8::JitCodeEvent* event) { jmethod.line_number_table[index].Offset = static_cast<unsigned int>(Iter->pc_); jmethod.line_number_table[index++].LineNumber = - script->GetLineNumber(Iter->pos_)+1; + script->GetUnboundScript()->GetLineNumber(Iter->pos_)+1; } GetEntries()->erase(event->code_start); } diff --git a/deps/v8/src/token.cc b/deps/v8/src/token.cc index 7ba7ed342..2215d3968 100644 --- a/deps/v8/src/token.cc +++ b/deps/v8/src/token.cc @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "../include/v8stdint.h" #include "token.h" diff --git a/deps/v8/src/token.h b/deps/v8/src/token.h index 8efaa477b..822608d69 100644 --- a/deps/v8/src/token.h +++ b/deps/v8/src/token.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_TOKEN_H_ #define V8_TOKEN_H_ diff --git a/deps/v8/src/transitions-inl.h b/deps/v8/src/transitions-inl.h index dc1620a07..899819860 100644 --- a/deps/v8/src/transitions-inl.h +++ b/deps/v8/src/transitions-inl.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_TRANSITIONS_INL_H_ #define V8_TRANSITIONS_INL_H_ @@ -88,12 +65,6 @@ FixedArray* TransitionArray::GetPrototypeTransitions() { } -HeapObject* TransitionArray::UncheckedPrototypeTransitions() { - ASSERT(HasPrototypeTransitions()); - return reinterpret_cast<HeapObject*>(get(kPrototypeTransitionsIndex)); -} - - void TransitionArray::SetPrototypeTransitions(FixedArray* transitions, WriteBarrierMode mode) { ASSERT(IsFullTransitionArray()); diff --git a/deps/v8/src/transitions.cc b/deps/v8/src/transitions.cc index 9d3f03894..7fd56cbe2 100644 --- a/deps/v8/src/transitions.cc +++ b/deps/v8/src/transitions.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -35,23 +12,21 @@ namespace v8 { namespace internal { -static MaybeObject* AllocateRaw(Isolate* isolate, int length) { - // Use FixedArray to not use TransitionArray::cast on incomplete object. 
- FixedArray* array; - MaybeObject* maybe_array = isolate->heap()->AllocateFixedArray(length); - if (!maybe_array->To(&array)) return maybe_array; - return array; +Handle<TransitionArray> TransitionArray::Allocate(Isolate* isolate, + int number_of_transitions) { + Handle<FixedArray> array = + isolate->factory()->NewFixedArray(ToKeyIndex(number_of_transitions)); + array->set(kPrototypeTransitionsIndex, Smi::FromInt(0)); + return Handle<TransitionArray>::cast(array); } -MaybeObject* TransitionArray::Allocate(Isolate* isolate, - int number_of_transitions) { - FixedArray* array; - MaybeObject* maybe_array = - AllocateRaw(isolate, ToKeyIndex(number_of_transitions)); - if (!maybe_array->To(&array)) return maybe_array; - array->set(kPrototypeTransitionsIndex, Smi::FromInt(0)); - return array; +Handle<TransitionArray> TransitionArray::AllocateSimple(Isolate* isolate, + Handle<Map> target) { + Handle<FixedArray> array = + isolate->factory()->NewFixedArray(kSimpleTransitionSize); + array->set(kSimpleTransitionTarget, *target); + return Handle<TransitionArray>::cast(array); } @@ -69,86 +44,111 @@ static bool InsertionPointFound(Name* key1, Name* key2) { } -MaybeObject* TransitionArray::NewWith(SimpleTransitionFlag flag, - Name* key, - Map* target, - Object* back_pointer) { - TransitionArray* result; - MaybeObject* maybe_result; +Handle<TransitionArray> TransitionArray::NewWith(Handle<Map> map, + Handle<Name> name, + Handle<Map> target, + SimpleTransitionFlag flag) { + Handle<TransitionArray> result; + Isolate* isolate = name->GetIsolate(); if (flag == SIMPLE_TRANSITION) { - maybe_result = AllocateRaw(target->GetIsolate(), kSimpleTransitionSize); - if (!maybe_result->To(&result)) return maybe_result; - result->set(kSimpleTransitionTarget, target); + result = AllocateSimple(isolate, target); } else { - maybe_result = Allocate(target->GetIsolate(), 1); - if (!maybe_result->To(&result)) return maybe_result; - result->NoIncrementalWriteBarrierSet(0, key, target); + result = Allocate(isolate, 1); + result->NoIncrementalWriteBarrierSet(0, *name, *target); } - result->set_back_pointer_storage(back_pointer); + result->set_back_pointer_storage(map->GetBackPointer()); return result; } -MaybeObject* TransitionArray::ExtendToFullTransitionArray() { - ASSERT(!IsFullTransitionArray()); - int nof = number_of_transitions(); - TransitionArray* result; - MaybeObject* maybe_result = Allocate(GetIsolate(), nof); - if (!maybe_result->To(&result)) return maybe_result; +Handle<TransitionArray> TransitionArray::ExtendToFullTransitionArray( + Handle<Map> containing_map) { + ASSERT(!containing_map->transitions()->IsFullTransitionArray()); + int nof = containing_map->transitions()->number_of_transitions(); - if (nof == 1) { - result->NoIncrementalWriteBarrierCopyFrom(this, kSimpleTransitionIndex, 0); + // A transition array may shrink during GC. 
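Aside: the handlified ExtendToFullTransitionArray and CopyInsert in the hunks that follow allocate first and only then re-read the source transition array, because the allocation may trigger a GC that shrinks the weakly traversed array; the copy is then trimmed to the new size. A simplified illustration of that "allocate, re-check, trim" pattern using plain C++ containers with hypothetical names, not V8's heap types:

    #include <cstdio>
    #include <vector>

    struct Source { std::vector<int> entries; };

    // Stand-in for an allocation that can run a GC which shrinks `src`.
    std::vector<int> AllocateResult(Source* src, size_t requested) {
      std::vector<int> result(requested);
      if (src->entries.size() > 1) src->entries.pop_back();  // simulated GC shrink
      return result;
    }

    std::vector<int> CopyInsertLike(Source* src, int value) {
      size_t n = src->entries.size();
      std::vector<int> result = AllocateResult(src, n + 1);
      // Re-check after the possibly GC-triggering allocation and trim if needed.
      if (src->entries.size() != n) {
        n = src->entries.size();
        result.resize(n + 1);
      }
      for (size_t i = 0; i < n; ++i) result[i] = src->entries[i];
      result[n] = value;
      return result;
    }

    int main() {
      Source src = {{1, 2, 3}};
      std::vector<int> out = CopyInsertLike(&src, 99);
      std::printf("size=%zu last=%d\n", out.size(), out.back());
    }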
+ Handle<TransitionArray> result = Allocate(containing_map->GetIsolate(), nof); + DisallowHeapAllocation no_gc; + int new_nof = containing_map->transitions()->number_of_transitions(); + if (new_nof != nof) { + ASSERT(new_nof == 0); + result->Shrink(ToKeyIndex(0)); + } else if (nof == 1) { + result->NoIncrementalWriteBarrierCopyFrom( + containing_map->transitions(), kSimpleTransitionIndex, 0); } - result->set_back_pointer_storage(back_pointer_storage()); + result->set_back_pointer_storage( + containing_map->transitions()->back_pointer_storage()); return result; } -MaybeObject* TransitionArray::CopyInsert(Name* name, Map* target) { - TransitionArray* result; +Handle<TransitionArray> TransitionArray::CopyInsert(Handle<Map> map, + Handle<Name> name, + Handle<Map> target, + SimpleTransitionFlag flag) { + if (!map->HasTransitionArray()) { + return TransitionArray::NewWith(map, name, target, flag); + } - int number_of_transitions = this->number_of_transitions(); + int number_of_transitions = map->transitions()->number_of_transitions(); int new_size = number_of_transitions; - int insertion_index = this->Search(name); + int insertion_index = map->transitions()->Search(*name); if (insertion_index == kNotFound) ++new_size; - MaybeObject* maybe_array; - maybe_array = TransitionArray::Allocate(GetIsolate(), new_size); - if (!maybe_array->To(&result)) return maybe_array; + Handle<TransitionArray> result = Allocate(map->GetIsolate(), new_size); + + // The map's transition array may grown smaller during the allocation above as + // it was weakly traversed, though it is guaranteed not to disappear. Trim the + // result copy if needed, and recompute variables. + ASSERT(map->HasTransitionArray()); + DisallowHeapAllocation no_gc; + TransitionArray* array = map->transitions(); + if (array->number_of_transitions() != number_of_transitions) { + ASSERT(array->number_of_transitions() < number_of_transitions); + + number_of_transitions = array->number_of_transitions(); + new_size = number_of_transitions; + + insertion_index = array->Search(*name); + if (insertion_index == kNotFound) ++new_size; + + result->Shrink(ToKeyIndex(new_size)); + } - if (HasPrototypeTransitions()) { - result->SetPrototypeTransitions(GetPrototypeTransitions()); + if (array->HasPrototypeTransitions()) { + result->SetPrototypeTransitions(array->GetPrototypeTransitions()); } if (insertion_index != kNotFound) { for (int i = 0; i < number_of_transitions; ++i) { if (i != insertion_index) { - result->NoIncrementalWriteBarrierCopyFrom(this, i, i); + result->NoIncrementalWriteBarrierCopyFrom(array, i, i); } } - result->NoIncrementalWriteBarrierSet(insertion_index, name, target); - result->set_back_pointer_storage(back_pointer_storage()); + result->NoIncrementalWriteBarrierSet(insertion_index, *name, *target); + result->set_back_pointer_storage(array->back_pointer_storage()); return result; } insertion_index = 0; for (; insertion_index < number_of_transitions; ++insertion_index) { - if (InsertionPointFound(GetKey(insertion_index), name)) break; + if (InsertionPointFound(array->GetKey(insertion_index), *name)) break; result->NoIncrementalWriteBarrierCopyFrom( - this, insertion_index, insertion_index); + array, insertion_index, insertion_index); } - result->NoIncrementalWriteBarrierSet(insertion_index, name, target); + result->NoIncrementalWriteBarrierSet(insertion_index, *name, *target); for (; insertion_index < number_of_transitions; ++insertion_index) { result->NoIncrementalWriteBarrierCopyFrom( - this, insertion_index, insertion_index + 1); + array, 
insertion_index, insertion_index + 1); } - result->set_back_pointer_storage(back_pointer_storage()); + result->set_back_pointer_storage(array->back_pointer_storage()); return result; } diff --git a/deps/v8/src/transitions.h b/deps/v8/src/transitions.h index b2e983967..7d8e551a5 100644 --- a/deps/v8/src/transitions.h +++ b/deps/v8/src/transitions.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_TRANSITIONS_H_ #define V8_TRANSITIONS_H_ @@ -85,7 +62,6 @@ class TransitionArray: public FixedArray { WriteBarrierMode mode = UPDATE_WRITE_BARRIER); inline Object** GetPrototypeTransitionsSlot(); inline bool HasPrototypeTransitions(); - inline HeapObject* UncheckedPrototypeTransitions(); // Returns the number of transitions in the array. int number_of_transitions() { @@ -96,30 +72,25 @@ class TransitionArray: public FixedArray { inline int number_of_entries() { return number_of_transitions(); } - // Allocate a new transition array with a single entry. - static MUST_USE_RESULT MaybeObject* NewWith( - SimpleTransitionFlag flag, - Name* key, - Map* target, - Object* back_pointer); + // Creates a FullTransitionArray from a SimpleTransitionArray in + // containing_map. + static Handle<TransitionArray> ExtendToFullTransitionArray( + Handle<Map> containing_map); - MUST_USE_RESULT MaybeObject* ExtendToFullTransitionArray(); - - // Copy the transition array, inserting a new transition. + // Create a transition array, copying from the owning map if it already has + // one, otherwise creating a new one according to flag. // TODO(verwaest): This should not cause an existing transition to be // overwritten. - MUST_USE_RESULT MaybeObject* CopyInsert(Name* name, Map* target); - - // Copy a single transition from the origin array. 
- inline void NoIncrementalWriteBarrierCopyFrom(TransitionArray* origin, - int origin_transition, - int target_transition); + static Handle<TransitionArray> CopyInsert(Handle<Map> map, + Handle<Name> name, + Handle<Map> target, + SimpleTransitionFlag flag); // Search a transition for a given property name. inline int Search(Name* name); // Allocates a TransitionArray. - MUST_USE_RESULT static MaybeObject* Allocate( + static Handle<TransitionArray> Allocate( Isolate* isolate, int number_of_transitions); bool IsSimpleTransition() { @@ -199,10 +170,24 @@ class TransitionArray: public FixedArray { kTransitionTarget; } + static Handle<TransitionArray> AllocateSimple( + Isolate* isolate, Handle<Map> target); + + // Allocate a new transition array with a single entry. + static Handle<TransitionArray> NewWith(Handle<Map> map, + Handle<Name> name, + Handle<Map> target, + SimpleTransitionFlag flag); + inline void NoIncrementalWriteBarrierSet(int transition_number, Name* key, Map* target); + // Copy a single transition from the origin array. + inline void NoIncrementalWriteBarrierCopyFrom(TransitionArray* origin, + int origin_transition, + int target_transition); + DISALLOW_IMPLICIT_CONSTRUCTORS(TransitionArray); }; diff --git a/deps/v8/src/trig-table.h b/deps/v8/src/trig-table.h index 081c0389a..7332152a9 100644 --- a/deps/v8/src/trig-table.h +++ b/deps/v8/src/trig-table.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_TRIG_TABLE_H_ #define V8_TRIG_TABLE_H_ diff --git a/deps/v8/src/type-info.cc b/deps/v8/src/type-info.cc index 99b1b3d89..83fcd0f25 100644 --- a/deps/v8/src/type-info.cc +++ b/deps/v8/src/type-info.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -43,18 +20,18 @@ namespace internal { TypeFeedbackOracle::TypeFeedbackOracle(Handle<Code> code, + Handle<FixedArray> feedback_vector, Handle<Context> native_context, Zone* zone) : native_context_(native_context), zone_(zone) { - Object* raw_info = code->type_feedback_info(); - if (raw_info->IsTypeFeedbackInfo()) { - feedback_vector_ = Handle<FixedArray>(TypeFeedbackInfo::cast(raw_info)-> - feedback_vector()); - } - BuildDictionary(code); ASSERT(dictionary_->IsDictionary()); + // We make a copy of the feedback vector because a GC could clear + // the type feedback info contained therein. + // TODO(mvstanton): revisit the decision to copy when we weakly + // traverse the feedback vector at GC time. + feedback_vector_ = isolate()->factory()->CopyFixedArray(feedback_vector); } @@ -136,9 +113,9 @@ bool TypeFeedbackOracle::CallNewIsMonomorphic(int slot) { byte TypeFeedbackOracle::ForInType(int feedback_vector_slot) { Handle<Object> value = GetInfo(feedback_vector_slot); - return value->IsSmi() && - Smi::cast(*value)->value() == TypeFeedbackInfo::kForInFastCaseMarker - ? ForInStatement::FAST_FOR_IN : ForInStatement::SLOW_FOR_IN; + return value.is_identical_to( + TypeFeedbackInfo::UninitializedSentinel(isolate())) + ? 
ForInStatement::FAST_FOR_IN : ForInStatement::SLOW_FOR_IN; } @@ -217,8 +194,8 @@ void TypeFeedbackOracle::CompareType(TypeFeedbackId id, Handle<Map> map; Map* raw_map = code->FindFirstMap(); if (raw_map != NULL) { - map = Map::CurrentMapForDeprecated(handle(raw_map)); - if (!map.is_null() && CanRetainOtherContext(*map, *native_context_)) { + if (Map::CurrentMapForDeprecated(handle(raw_map)).ToHandle(&map) && + CanRetainOtherContext(*map, *native_context_)) { map = Handle<Map>::null(); } } @@ -228,7 +205,7 @@ void TypeFeedbackOracle::CompareType(TypeFeedbackId id, CompareIC::StubInfoToType( stub_minor_key, left_type, right_type, combined_type, map, zone()); } else if (code->is_compare_nil_ic_stub()) { - CompareNilICStub stub(code->extra_ic_state()); + CompareNilICStub stub(isolate(), code->extra_ic_state()); *combined_type = stub.GetType(zone(), map); *left_type = *right_type = stub.GetInputType(zone(), map); } @@ -255,7 +232,7 @@ void TypeFeedbackOracle::BinaryType(TypeFeedbackId id, } Handle<Code> code = Handle<Code>::cast(object); ASSERT_EQ(Code::BINARY_OP_IC, code->kind()); - BinaryOpIC::State state(code->extra_ic_state()); + BinaryOpIC::State state(isolate(), code->extra_ic_state()); ASSERT_EQ(op, state.op()); *left = state.GetLeftType(zone()); @@ -277,7 +254,7 @@ Type* TypeFeedbackOracle::CountType(TypeFeedbackId id) { if (!object->IsCode()) return Type::None(zone()); Handle<Code> code = Handle<Code>::cast(object); ASSERT_EQ(Code::BINARY_OP_IC, code->kind()); - BinaryOpIC::State state(code->extra_ic_state()); + BinaryOpIC::State state(isolate(), code->extra_ic_state()); return state.GetLeftType(zone()); } @@ -286,7 +263,7 @@ void TypeFeedbackOracle::PropertyReceiverTypes( TypeFeedbackId id, Handle<String> name, SmallMapList* receiver_types, bool* is_prototype) { receiver_types->Clear(); - FunctionPrototypeStub proto_stub(Code::LOAD_IC); + FunctionPrototypeStub proto_stub(isolate(), Code::LOAD_IC); *is_prototype = LoadIsStub(id, &proto_stub); if (!*is_prototype) { Code::Flags flags = Code::ComputeHandlerFlags(Code::LOAD_IC); @@ -445,8 +422,7 @@ void TypeFeedbackOracle::CreateDictionary(Handle<Code> code, ZoneList<RelocInfo>* infos) { AllowHeapAllocation allocation_allowed; Code* old_code = *code; - dictionary_ = - isolate()->factory()->NewUnseededNumberDictionary(infos->length()); + dictionary_ = UnseededNumberDictionary::New(isolate(), infos->length()); RelocateRelocInfos(infos, old_code, *code); } @@ -492,14 +468,11 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) { void TypeFeedbackOracle::SetInfo(TypeFeedbackId ast_id, Object* target) { ASSERT(dictionary_->FindEntry(IdToKey(ast_id)) == UnseededNumberDictionary::kNotFound); - MaybeObject* maybe_result = dictionary_->AtNumberPut(IdToKey(ast_id), target); - USE(maybe_result); -#ifdef DEBUG - Object* result = NULL; // Dictionary has been allocated with sufficient size for all elements. - ASSERT(maybe_result->ToObject(&result)); - ASSERT(*dictionary_ == result); -#endif + DisallowHeapAllocation no_need_to_resize_dictionary; + HandleScope scope(isolate()); + USE(UnseededNumberDictionary::AtNumberPut( + dictionary_, IdToKey(ast_id), handle(target, isolate()))); } diff --git a/deps/v8/src/type-info.h b/deps/v8/src/type-info.h index 5bf653f1c..24a9edbd3 100644 --- a/deps/v8/src/type-info.h +++ b/deps/v8/src/type-info.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_TYPE_INFO_H_ #define V8_TYPE_INFO_H_ @@ -44,6 +21,7 @@ class SmallMapList; class TypeFeedbackOracle: public ZoneObject { public: TypeFeedbackOracle(Handle<Code> code, + Handle<FixedArray> feedback_vector, Handle<Context> native_context, Zone* zone); diff --git a/deps/v8/src/typedarray.js b/deps/v8/src/typedarray.js index 109d62700..267d43d51 100644 --- a/deps/v8/src/typedarray.js +++ b/deps/v8/src/typedarray.js @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. "use strict"; @@ -57,7 +34,7 @@ macro TYPED_ARRAY_CONSTRUCTOR(ARRAY_ID, NAME, ELEMENT_SIZE) length = ToPositiveInteger(length, "invalid_typed_array_length"); } - var bufferByteLength = %ArrayBufferGetByteLength(buffer); + var bufferByteLength = %_ArrayBufferGetByteLength(buffer); var offset; if (IS_UNDEFINED(byteOffset)) { offset = 0; @@ -125,7 +102,6 @@ macro TYPED_ARRAY_CONSTRUCTOR(ARRAY_ID, NAME, ELEMENT_SIZE) } function NAMEConstructor(arg1, arg2, arg3) { - if (%_IsConstructCall()) { if (IS_ARRAYBUFFER(arg1)) { NAMEConstructByArrayBuffer(this, arg1, arg2, arg3); @@ -139,34 +115,52 @@ macro TYPED_ARRAY_CONSTRUCTOR(ARRAY_ID, NAME, ELEMENT_SIZE) throw MakeTypeError("constructor_not_function", ["NAME"]) } } -endmacro -TYPED_ARRAYS(TYPED_ARRAY_CONSTRUCTOR) + function NAME_GetBuffer() { + if (!(%_ClassOf(this) === 'NAME')) { + throw MakeTypeError('incompatible_method_receiver', + ["NAME.buffer", this]); + } + return %TypedArrayGetBuffer(this); + } -function TypedArrayGetBuffer() { - return %TypedArrayGetBuffer(this); -} + function NAME_GetByteLength() { + if (!(%_ClassOf(this) === 'NAME')) { + throw MakeTypeError('incompatible_method_receiver', + ["NAME.byteLength", this]); + } + return %_ArrayBufferViewGetByteLength(this); + } -function TypedArrayGetByteLength() { - return %TypedArrayGetByteLength(this); -} + function NAME_GetByteOffset() { + if (!(%_ClassOf(this) === 'NAME')) { + throw MakeTypeError('incompatible_method_receiver', + ["NAME.byteOffset", this]); + } + return %_ArrayBufferViewGetByteOffset(this); + } -function TypedArrayGetByteOffset() { - return %TypedArrayGetByteOffset(this); -} + function NAME_GetLength() { + if (!(%_ClassOf(this) === 'NAME')) { + throw MakeTypeError('incompatible_method_receiver', + ["NAME.length", this]); + } + return %_TypedArrayGetLength(this); + } -function TypedArrayGetLength() { - return %TypedArrayGetLength(this); -} + var $NAME = global.NAME; -function CreateSubArray(elementSize, constructor) { - return function(begin, end) { + function NAMESubArray(begin, end) { + if (!(%_ClassOf(this) === 'NAME')) { + throw MakeTypeError('incompatible_method_receiver', + ["NAME.subarray", this]); + } var beginInt = TO_INTEGER(begin); if (!IS_UNDEFINED(end)) { end = TO_INTEGER(end); } - var srcLength = %TypedArrayGetLength(this); + var srcLength = %_TypedArrayGetLength(this); if (beginInt < 0) { beginInt = MathMax(0, srcLength + beginInt); } else { @@ -184,11 +178,14 @@ function CreateSubArray(elementSize, constructor) { } var newLength = endInt - beginInt; var beginByteOffset = - %TypedArrayGetByteOffset(this) + beginInt * elementSize; - return new constructor(%TypedArrayGetBuffer(this), - beginByteOffset, newLength); + %_ArrayBufferViewGetByteOffset(this) + beginInt * ELEMENT_SIZE; + return new $NAME(%TypedArrayGetBuffer(this), + beginByteOffset, newLength); } -} +endmacro + +TYPED_ARRAYS(TYPED_ARRAY_CONSTRUCTOR) + function 
TypedArraySetFromArrayLike(target, source, sourceLength, offset) { if (offset > 0) { @@ -296,34 +293,34 @@ function TypedArraySet(obj, offset) { // ------------------------------------------------------------------- -function SetupTypedArray(constructor, fun, elementSize) { +function SetupTypedArrays() { +macro SETUP_TYPED_ARRAY(ARRAY_ID, NAME, ELEMENT_SIZE) %CheckIsBootstrapping(); - %SetCode(constructor, fun); - %FunctionSetPrototype(constructor, new $Object()); + %SetCode(global.NAME, NAMEConstructor); + %FunctionSetPrototype(global.NAME, new $Object()); - %SetProperty(constructor, "BYTES_PER_ELEMENT", elementSize, + %SetProperty(global.NAME, "BYTES_PER_ELEMENT", ELEMENT_SIZE, READ_ONLY | DONT_ENUM | DONT_DELETE); - %SetProperty(constructor.prototype, - "constructor", constructor, DONT_ENUM); - %SetProperty(constructor.prototype, - "BYTES_PER_ELEMENT", elementSize, + %SetProperty(global.NAME.prototype, + "constructor", global.NAME, DONT_ENUM); + %SetProperty(global.NAME.prototype, + "BYTES_PER_ELEMENT", ELEMENT_SIZE, READ_ONLY | DONT_ENUM | DONT_DELETE); - InstallGetter(constructor.prototype, "buffer", TypedArrayGetBuffer); - InstallGetter(constructor.prototype, "byteOffset", TypedArrayGetByteOffset); - InstallGetter(constructor.prototype, "byteLength", TypedArrayGetByteLength); - InstallGetter(constructor.prototype, "length", TypedArrayGetLength); + InstallGetter(global.NAME.prototype, "buffer", NAME_GetBuffer); + InstallGetter(global.NAME.prototype, "byteOffset", NAME_GetByteOffset); + InstallGetter(global.NAME.prototype, "byteLength", NAME_GetByteLength); + InstallGetter(global.NAME.prototype, "length", NAME_GetLength); - InstallFunctions(constructor.prototype, DONT_ENUM, $Array( - "subarray", CreateSubArray(elementSize, constructor), + InstallFunctions(global.NAME.prototype, DONT_ENUM, $Array( + "subarray", NAMESubArray, "set", TypedArraySet )); -} - -macro SETUP_TYPED_ARRAY(ARRAY_ID, NAME, ELEMENT_SIZE) - SetupTypedArray (global.NAME, NAMEConstructor, ELEMENT_SIZE); endmacro TYPED_ARRAYS(SETUP_TYPED_ARRAY) +} + +SetupTypedArrays(); // --------------------------- DataView ----------------------------- @@ -341,7 +338,7 @@ function DataViewConstructor(buffer, byteOffset, byteLength) { // length = 3 byteLength = TO_INTEGER(byteLength); } - var bufferByteLength = %ArrayBufferGetByteLength(buffer); + var bufferByteLength = %_ArrayBufferGetByteLength(buffer); var offset = IS_UNDEFINED(byteOffset) ? 0 : byteOffset; if (offset > bufferByteLength) { @@ -373,7 +370,7 @@ function DataViewGetByteOffset() { throw MakeTypeError('incompatible_method_receiver', ['DataView.byteOffset', this]); } - return %DataViewGetByteOffset(this); + return %_ArrayBufferViewGetByteOffset(this); } function DataViewGetByteLength() { @@ -381,7 +378,7 @@ function DataViewGetByteLength() { throw MakeTypeError('incompatible_method_receiver', ['DataView.byteLength', this]); } - return %DataViewGetByteLength(this); + return %_ArrayBufferViewGetByteLength(this); } macro DATA_VIEW_TYPES(FUNCTION) diff --git a/deps/v8/src/types-inl.h b/deps/v8/src/types-inl.h new file mode 100644 index 000000000..ca4f120c7 --- /dev/null +++ b/deps/v8/src/types-inl.h @@ -0,0 +1,352 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef V8_TYPES_INL_H_ +#define V8_TYPES_INL_H_ + +#include "types.h" + +#include "factory.h" +#include "handles-inl.h" + +namespace v8 { +namespace internal { + +// -------------------------------------------------------------------------- // +// TypeImpl + +template<class Config> +TypeImpl<Config>* TypeImpl<Config>::cast(typename Config::Base* object) { + TypeImpl* t = static_cast<TypeImpl*>(object); + ASSERT(t->IsBitset() || t->IsClass() || t->IsConstant() || + t->IsUnion() || t->IsArray() || t->IsFunction()); + return t; +} + + +template<class Config> +bool TypeImpl<Config>::NowContains(i::Object* value) { + DisallowHeapAllocation no_allocation; + if (this->IsAny()) return true; + if (value->IsHeapObject()) { + i::Map* map = i::HeapObject::cast(value)->map(); + for (Iterator<i::Map> it = this->Classes(); !it.Done(); it.Advance()) { + if (*it.Current() == map) return true; + } + } + return this->Contains(value); +} + + +// -------------------------------------------------------------------------- // +// ZoneTypeConfig + +// static +template<class T> +T* ZoneTypeConfig::handle(T* type) { + return type; +} + + +// static +template<class T> +T* ZoneTypeConfig::cast(Type* type) { + return static_cast<T*>(type); +} + + +// static +bool ZoneTypeConfig::is_bitset(Type* type) { + return reinterpret_cast<intptr_t>(type) & 1; +} + + +// static +bool ZoneTypeConfig::is_struct(Type* type, int tag) { + return !is_bitset(type) && struct_tag(as_struct(type)) == tag; +} + + +// static +bool ZoneTypeConfig::is_class(Type* type) { + return is_struct(type, Type::StructuralType::kClassTag); +} + + +// static +bool ZoneTypeConfig::is_constant(Type* type) { + return is_struct(type, Type::StructuralType::kConstantTag); +} + + +// static +int ZoneTypeConfig::as_bitset(Type* type) { + ASSERT(is_bitset(type)); + return static_cast<int>(reinterpret_cast<intptr_t>(type) >> 1); +} + + +// static +ZoneTypeConfig::Struct* ZoneTypeConfig::as_struct(Type* type) { + ASSERT(!is_bitset(type)); + return reinterpret_cast<Struct*>(type); +} + + +// static +i::Handle<i::Map> ZoneTypeConfig::as_class(Type* type) { + ASSERT(is_class(type)); + return i::Handle<i::Map>(static_cast<i::Map**>(as_struct(type)[3])); +} + + +// static +i::Handle<i::Object> ZoneTypeConfig::as_constant(Type* type) { + ASSERT(is_constant(type)); + return i::Handle<i::Object>( + static_cast<i::Object**>(as_struct(type)[3])); +} + + +// static +ZoneTypeConfig::Type* ZoneTypeConfig::from_bitset(int bitset) { + return reinterpret_cast<Type*>((bitset << 1) | 1); +} + + +// static +ZoneTypeConfig::Type* ZoneTypeConfig::from_bitset(int bitset, Zone* Zone) { + return from_bitset(bitset); +} + + +// static +ZoneTypeConfig::Type* ZoneTypeConfig::from_struct(Struct* structured) { + return reinterpret_cast<Type*>(structured); +} + + +// static +ZoneTypeConfig::Type* ZoneTypeConfig::from_class( + i::Handle<i::Map> map, int lub, Zone* zone) { + Struct* structured = struct_create(Type::StructuralType::kClassTag, 2, zone); + structured[2] = from_bitset(lub); + structured[3] = map.location(); + return from_struct(structured); +} + + +// static +ZoneTypeConfig::Type* ZoneTypeConfig::from_constant( + i::Handle<i::Object> value, int lub, Zone* zone) { + Struct* structured = + struct_create(Type::StructuralType::kConstantTag, 2, zone); + structured[2] = from_bitset(lub); + structured[3] = value.location(); + return from_struct(structured); +} + + +// static +ZoneTypeConfig::Struct* ZoneTypeConfig::struct_create( + int tag, int length, Zone* zone) { + Struct* structured = 
reinterpret_cast<Struct*>( + zone->New(sizeof(void*) * (length + 2))); // NOLINT + structured[0] = reinterpret_cast<void*>(tag); + structured[1] = reinterpret_cast<void*>(length); + return structured; +} + + +// static +void ZoneTypeConfig::struct_shrink(Struct* structured, int length) { + ASSERT(0 <= length && length <= struct_length(structured)); + structured[1] = reinterpret_cast<void*>(length); +} + + +// static +int ZoneTypeConfig::struct_tag(Struct* structured) { + return static_cast<int>(reinterpret_cast<intptr_t>(structured[0])); +} + + +// static +int ZoneTypeConfig::struct_length(Struct* structured) { + return static_cast<int>(reinterpret_cast<intptr_t>(structured[1])); +} + + +// static +Type* ZoneTypeConfig::struct_get(Struct* structured, int i) { + ASSERT(0 <= i && i <= struct_length(structured)); + return static_cast<Type*>(structured[2 + i]); +} + + +// static +void ZoneTypeConfig::struct_set(Struct* structured, int i, Type* type) { + ASSERT(0 <= i && i <= struct_length(structured)); + structured[2 + i] = type; +} + + +// static +int ZoneTypeConfig::lub_bitset(Type* type) { + ASSERT(is_class(type) || is_constant(type)); + return as_bitset(struct_get(as_struct(type), 0)); +} + + +// -------------------------------------------------------------------------- // +// HeapTypeConfig + +// static +template<class T> +i::Handle<T> HeapTypeConfig::handle(T* type) { + return i::handle(type, i::HeapObject::cast(type)->GetIsolate()); +} + + +// static +template<class T> +i::Handle<T> HeapTypeConfig::cast(i::Handle<Type> type) { + return i::Handle<T>::cast(type); +} + + +// static +bool HeapTypeConfig::is_bitset(Type* type) { + return type->IsSmi(); +} + + +// static +bool HeapTypeConfig::is_class(Type* type) { + return type->IsMap(); +} + + +// static +bool HeapTypeConfig::is_constant(Type* type) { + return type->IsBox(); +} + + +// static +bool HeapTypeConfig::is_struct(Type* type, int tag) { + return type->IsFixedArray() && struct_tag(as_struct(type)) == tag; +} + + +// static +int HeapTypeConfig::as_bitset(Type* type) { + return i::Smi::cast(type)->value(); +} + + +// static +i::Handle<i::Map> HeapTypeConfig::as_class(Type* type) { + return i::handle(i::Map::cast(type)); +} + + +// static +i::Handle<i::Object> HeapTypeConfig::as_constant(Type* type) { + i::Box* box = i::Box::cast(type); + return i::handle(box->value(), box->GetIsolate()); +} + + +// static +i::Handle<HeapTypeConfig::Struct> HeapTypeConfig::as_struct(Type* type) { + return i::handle(Struct::cast(type)); +} + + +// static +HeapTypeConfig::Type* HeapTypeConfig::from_bitset(int bitset) { + return Type::cast(i::Smi::FromInt(bitset)); +} + + +// static +i::Handle<HeapTypeConfig::Type> HeapTypeConfig::from_bitset( + int bitset, Isolate* isolate) { + return i::handle(from_bitset(bitset), isolate); +} + + +// static +i::Handle<HeapTypeConfig::Type> HeapTypeConfig::from_class( + i::Handle<i::Map> map, int lub, Isolate* isolate) { + return i::Handle<Type>::cast(i::Handle<Object>::cast(map)); +} + + +// static +i::Handle<HeapTypeConfig::Type> HeapTypeConfig::from_constant( + i::Handle<i::Object> value, int lub, Isolate* isolate) { + i::Handle<Box> box = isolate->factory()->NewBox(value); + return i::Handle<Type>::cast(i::Handle<Object>::cast(box)); +} + + +// static +i::Handle<HeapTypeConfig::Type> HeapTypeConfig::from_struct( + i::Handle<Struct> structured) { + return i::Handle<Type>::cast(i::Handle<Object>::cast(structured)); +} + + +// static +i::Handle<HeapTypeConfig::Struct> HeapTypeConfig::struct_create( + int tag, int 
length, Isolate* isolate) { + i::Handle<Struct> structured = isolate->factory()->NewFixedArray(length + 1); + structured->set(0, i::Smi::FromInt(tag)); + return structured; +} + + +// static +void HeapTypeConfig::struct_shrink(i::Handle<Struct> structured, int length) { + structured->Shrink(length + 1); +} + + +// static +int HeapTypeConfig::struct_tag(i::Handle<Struct> structured) { + return static_cast<i::Smi*>(structured->get(0))->value(); +} + + +// static +int HeapTypeConfig::struct_length(i::Handle<Struct> structured) { + return structured->length() - 1; +} + + +// static +i::Handle<HeapTypeConfig::Type> HeapTypeConfig::struct_get( + i::Handle<Struct> structured, int i) { + Type* type = static_cast<Type*>(structured->get(i + 1)); + return i::handle(type, structured->GetIsolate()); +} + + +// static +void HeapTypeConfig::struct_set( + i::Handle<Struct> structured, int i, i::Handle<Type> type) { + structured->set(i + 1, *type); +} + + +// static +int HeapTypeConfig::lub_bitset(Type* type) { + return 0; // kNone, which causes recomputation. +} + +} } // namespace v8::internal + +#endif // V8_TYPES_INL_H_ diff --git a/deps/v8/src/types.cc b/deps/v8/src/types.cc index e269582ca..5270c0ea6 100644 --- a/deps/v8/src/types.cc +++ b/deps/v8/src/types.cc @@ -1,45 +1,25 @@ -// Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
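
The ZoneTypeConfig helpers in types-inl.h above encode a type either as a bitset (the payload shifted left one bit with the low bit set, so the pointer value is odd) or as a pointer to a zone-allocated word array laid out as [tag, length, fields...]; for Class and Constant, slot 2 carries the cached lub bitset and slot 3 the handle location. The standalone sketch below models only that tagging scheme outside of V8; it is not code from this patch, and the names (TaggedType, the 0x10/0x20 bitsets, the integer 42 standing in for map.location()) plus the use of plain malloc in place of the Zone allocator are illustrative assumptions.

// Simplified model of the ZoneTypeConfig encoding shown above (assumption:
// malloc stands in for the Zone allocator, an int stands in for Map**).
#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

typedef void* TaggedType;  // odd pointer value => bitset, even => struct

static bool is_bitset(TaggedType t) {
  return (reinterpret_cast<intptr_t>(t) & 1) != 0;
}

static TaggedType from_bitset(int bitset) {
  // Shift the payload up one bit and set the low bit, as ZoneTypeConfig does.
  return reinterpret_cast<TaggedType>(
      (static_cast<intptr_t>(bitset) << 1) | 1);
}

static int as_bitset(TaggedType t) {
  assert(is_bitset(t));
  return static_cast<int>(reinterpret_cast<intptr_t>(t) >> 1);
}

// Structs are plain word arrays: slot 0 = tag, slot 1 = length, slots 2.. =
// fields (for Class/Constant: slot 2 = lub bitset, slot 3 = payload).
static void** struct_create(int tag, int length) {
  void** s = static_cast<void**>(malloc(sizeof(void*) * (length + 2)));
  s[0] = reinterpret_cast<void*>(static_cast<intptr_t>(tag));
  s[1] = reinterpret_cast<void*>(static_cast<intptr_t>(length));
  return s;
}

static int struct_tag(void** s) {
  return static_cast<int>(reinterpret_cast<intptr_t>(s[0]));
}

int main() {
  enum { kClassTag = 0, kConstantTag = 1 };

  TaggedType number = from_bitset(0x10);       // some bitset-encoded type
  void** klass = struct_create(kClassTag, 2);  // [tag, length, lub, payload]
  klass[2] = from_bitset(0x20);                // lub bitset lives in slot 2
  klass[3] = reinterpret_cast<void*>(static_cast<intptr_t>(42));  // map slot

  printf("bitset? %d, value 0x%x\n",
         is_bitset(number) ? 1 : 0, as_bitset(number));
  printf("struct tag %d, lub 0x%x\n",
         struct_tag(klass), as_bitset(klass[2]));
  free(klass);
  return 0;
}

Tagging the low bit this way means the common bitset types never touch the zone at all; only Class, Constant, Array, Function and Union types allocate.
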
#include "types.h" + #include "string-stream.h" +#include "types-inl.h" namespace v8 { namespace internal { template<class Config> int TypeImpl<Config>::NumClasses() { + DisallowHeapAllocation no_allocation; if (this->IsClass()) { return 1; } else if (this->IsUnion()) { - UnionedHandle unioned = this->AsUnion(); + UnionHandle unioned = handle(this->AsUnion()); int result = 0; - for (int i = 0; i < Config::union_length(unioned); ++i) { - if (Config::union_get(unioned, i)->IsClass()) ++result; + for (int i = 0; i < unioned->Length(); ++i) { + if (unioned->Get(i)->IsClass()) ++result; } return result; } else { @@ -50,13 +30,14 @@ int TypeImpl<Config>::NumClasses() { template<class Config> int TypeImpl<Config>::NumConstants() { + DisallowHeapAllocation no_allocation; if (this->IsConstant()) { return 1; } else if (this->IsUnion()) { - UnionedHandle unioned = this->AsUnion(); + UnionHandle unioned = handle(this->AsUnion()); int result = 0; - for (int i = 0; i < Config::union_length(unioned); ++i) { - if (Config::union_get(unioned, i)->IsConstant()) ++result; + for (int i = 0; i < unioned->Length(); ++i) { + if (unioned->Get(i)->IsConstant()) ++result; } return result; } else { @@ -69,7 +50,7 @@ template<class Config> template<class T> typename TypeImpl<Config>::TypeHandle TypeImpl<Config>::Iterator<T>::get_type() { ASSERT(!Done()); - return type_->IsUnion() ? Config::union_get(type_->AsUnion(), index_) : type_; + return type_->IsUnion() ? type_->AsUnion()->Get(index_) : type_; } @@ -87,7 +68,7 @@ struct TypeImplIteratorAux<Config, i::Map> { return type->IsClass(); } static i::Handle<i::Map> current(typename TypeImpl<Config>::TypeHandle type) { - return type->AsClass(); + return type->AsClass()->Map(); } }; @@ -98,7 +79,7 @@ struct TypeImplIteratorAux<Config, i::Object> { } static i::Handle<i::Object> current( typename TypeImpl<Config>::TypeHandle type) { - return type->AsConstant(); + return type->AsConstant()->Value(); } }; @@ -115,11 +96,12 @@ i::Handle<T> TypeImpl<Config>::Iterator<T>::Current() { template<class Config> template<class T> void TypeImpl<Config>::Iterator<T>::Advance() { + DisallowHeapAllocation no_allocation; ++index_; if (type_->IsUnion()) { - UnionedHandle unioned = type_->AsUnion(); - for (; index_ < Config::union_length(unioned); ++index_) { - if (matches(Config::union_get(unioned, index_))) return; + UnionHandle unioned = handle(type_->AsUnion()); + for (; index_ < unioned->Length(); ++index_) { + if (matches(unioned->Get(index_))) return; } } else if (index_ == 0 && matches(type_)) { return; @@ -128,30 +110,54 @@ void TypeImpl<Config>::Iterator<T>::Advance() { } +// Get the largest bitset subsumed by this type. +template<class Config> +int TypeImpl<Config>::BitsetType::Glb(TypeImpl* type) { + DisallowHeapAllocation no_allocation; + if (type->IsBitset()) { + return type->AsBitset(); + } else if (type->IsUnion()) { + // All but the first are non-bitsets and thus would yield kNone anyway. + return type->AsUnion()->Get(0)->BitsetGlb(); + } else { + return kNone; + } +} + + // Get the smallest bitset subsuming this type. 
template<class Config> -int TypeImpl<Config>::LubBitset() { - if (this->IsBitset()) { - return this->AsBitset(); - } else if (this->IsUnion()) { - UnionedHandle unioned = this->AsUnion(); +int TypeImpl<Config>::BitsetType::Lub(TypeImpl* type) { + DisallowHeapAllocation no_allocation; + if (type->IsBitset()) { + return type->AsBitset(); + } else if (type->IsUnion()) { + UnionHandle unioned = handle(type->AsUnion()); int bitset = kNone; - for (int i = 0; i < Config::union_length(unioned); ++i) { - bitset |= Config::union_get(unioned, i)->LubBitset(); + for (int i = 0; i < unioned->Length(); ++i) { + bitset |= unioned->Get(i)->BitsetLub(); } return bitset; - } else if (this->IsClass()) { - int bitset = Config::lub_bitset(this); - return bitset ? bitset : LubBitset(*this->AsClass()); + } else if (type->IsClass()) { + int bitset = Config::lub_bitset(type); + return bitset ? bitset : Lub(*type->AsClass()->Map()); + } else if (type->IsConstant()) { + int bitset = Config::lub_bitset(type); + return bitset ? bitset : Lub(*type->AsConstant()->Value()); + } else if (type->IsArray()) { + return kArray; + } else if (type->IsFunction()) { + return kFunction; } else { - int bitset = Config::lub_bitset(this); - return bitset ? bitset : LubBitset(*this->AsConstant()); + UNREACHABLE(); + return kNone; } } template<class Config> -int TypeImpl<Config>::LubBitset(i::Object* value) { +int TypeImpl<Config>::BitsetType::Lub(i::Object* value) { + DisallowHeapAllocation no_allocation; if (value->IsSmi()) return kSignedSmall & kTaggedInt; i::Map* map = i::HeapObject::cast(value)->map(); if (map->instance_type() == HEAP_NUMBER_TYPE) { @@ -161,20 +167,13 @@ int TypeImpl<Config>::LubBitset(i::Object* value) { value->ToInt32(&i) ? (Smi::IsValid(i) ? kSignedSmall : kOtherSigned32) : value->ToUint32(&u) ? kUnsigned32 : kFloat); } - if (map->instance_type() == ODDBALL_TYPE) { - if (value->IsUndefined()) return kUndefined; - if (value->IsNull()) return kNull; - if (value->IsBoolean()) return kBoolean; - if (value->IsTheHole()) return kAny; // TODO(rossberg): kNone? - if (value->IsUninitialized()) return kNone; - UNREACHABLE(); - } - return LubBitset(map); + return Lub(map); } template<class Config> -int TypeImpl<Config>::LubBitset(i::Map* map) { +int TypeImpl<Config>::BitsetType::Lub(i::Map* map) { + DisallowHeapAllocation no_allocation; switch (map->instance_type()) { case STRING_TYPE: case ASCII_STRING_TYPE: @@ -190,8 +189,6 @@ int TypeImpl<Config>::LubBitset(i::Map* map) { case SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE: case INTERNALIZED_STRING_TYPE: case ASCII_INTERNALIZED_STRING_TYPE: - case CONS_INTERNALIZED_STRING_TYPE: - case CONS_ASCII_INTERNALIZED_STRING_TYPE: case EXTERNAL_INTERNALIZED_STRING_TYPE: case EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE: case EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE: @@ -201,8 +198,18 @@ int TypeImpl<Config>::LubBitset(i::Map* map) { return kString; case SYMBOL_TYPE: return kSymbol; - case ODDBALL_TYPE: - return kOddball; + case ODDBALL_TYPE: { + Heap* heap = map->GetHeap(); + if (map == heap->undefined_map()) return kUndefined; + if (map == heap->the_hole_map()) return kAny; // TODO(rossberg): kNone? 
+ if (map == heap->null_map()) return kNull; + if (map == heap->boolean_map()) return kBoolean; + ASSERT(map == heap->uninitialized_map() || + map == heap->no_interceptor_result_sentinel_map() || + map == heap->termination_exception_map() || + map == heap->arguments_marker_map()); + return kInternal & kTaggedPtr; + } case HEAP_NUMBER_TYPE: return kFloat & kTaggedPtr; case JS_VALUE_TYPE: @@ -219,6 +226,8 @@ int TypeImpl<Config>::LubBitset(i::Map* map) { case JS_DATA_VIEW_TYPE: case JS_SET_TYPE: case JS_MAP_TYPE: + case JS_SET_ITERATOR_TYPE: + case JS_MAP_ITERATOR_TYPE: case JS_WEAK_MAP_TYPE: case JS_WEAK_SET_TYPE: if (map->is_undetectable()) return kUndetectable; @@ -247,6 +256,7 @@ int TypeImpl<Config>::LubBitset(i::Map* map) { case EXECUTABLE_ACCESSOR_INFO_TYPE: case ACCESSOR_PAIR_TYPE: case FIXED_ARRAY_TYPE: + case FOREIGN_TYPE: return kInternal & kTaggedPtr; default: UNREACHABLE(); @@ -255,55 +265,63 @@ int TypeImpl<Config>::LubBitset(i::Map* map) { } -// Get the largest bitset subsumed by this type. -template<class Config> -int TypeImpl<Config>::GlbBitset() { - if (this->IsBitset()) { - return this->AsBitset(); - } else if (this->IsUnion()) { - // All but the first are non-bitsets and thus would yield kNone anyway. - return Config::union_get(this->AsUnion(), 0)->GlbBitset(); - } else { - return kNone; - } -} - - // Most precise _current_ type of a value (usually its class). template<class Config> -typename TypeImpl<Config>::TypeHandle TypeImpl<Config>::OfCurrently( - i::Handle<i::Object> value, Region* region) { +typename TypeImpl<Config>::TypeHandle TypeImpl<Config>::NowOf( + i::Object* value, Region* region) { if (value->IsSmi() || - i::HeapObject::cast(*value)->map()->instance_type() == HEAP_NUMBER_TYPE || - i::HeapObject::cast(*value)->map()->instance_type() == ODDBALL_TYPE) { + i::HeapObject::cast(value)->map()->instance_type() == HEAP_NUMBER_TYPE) { return Of(value, region); } - return Class(i::handle(i::HeapObject::cast(*value)->map()), region); + return Class(i::handle(i::HeapObject::cast(value)->map()), region); } // Check this <= that. template<class Config> bool TypeImpl<Config>::SlowIs(TypeImpl* that) { + DisallowHeapAllocation no_allocation; + // Fast path for bitsets. if (this->IsNone()) return true; if (that->IsBitset()) { - return (this->LubBitset() | that->AsBitset()) == that->AsBitset(); + return (BitsetType::Lub(this) | that->AsBitset()) == that->AsBitset(); } if (that->IsClass()) { - return this->IsClass() && *this->AsClass() == *that->AsClass(); + return this->IsClass() + && *this->AsClass()->Map() == *that->AsClass()->Map(); } if (that->IsConstant()) { - return this->IsConstant() && *this->AsConstant() == *that->AsConstant(); + return this->IsConstant() + && *this->AsConstant()->Value() == *that->AsConstant()->Value(); + } + if (that->IsArray()) { + return this->IsArray() + && this->AsArray()->Element()->Equals(that->AsArray()->Element()); + } + if (that->IsFunction()) { + // We currently do not allow for any variance here, in order to keep + // Union and Intersect operations simple. + if (!this->IsFunction()) return false; + FunctionType* this_fun = this->AsFunction(); + FunctionType* that_fun = that->AsFunction(); + if (this_fun->Arity() != that_fun->Arity() || + !this_fun->Result()->Equals(that_fun->Result()) || + !that_fun->Receiver()->Equals(this_fun->Receiver())) { + return false; + } + for (int i = 0; i < this_fun->Arity(); ++i) { + if (!that_fun->Parameter(i)->Equals(this_fun->Parameter(i))) return false; + } + return true; } // (T1 \/ ... 
\/ Tn) <= T <=> (T1 <= T) /\ ... /\ (Tn <= T) if (this->IsUnion()) { - UnionedHandle unioned = this->AsUnion(); - for (int i = 0; i < Config::union_length(unioned); ++i) { - TypeHandle this_i = Config::union_get(unioned, i); - if (!this_i->Is(that)) return false; + UnionHandle unioned = handle(this->AsUnion()); + for (int i = 0; i < unioned->Length(); ++i) { + if (!unioned->Get(i)->Is(that)) return false; } return true; } @@ -312,10 +330,9 @@ bool TypeImpl<Config>::SlowIs(TypeImpl* that) { // (iff T is not a union) ASSERT(!this->IsUnion()); if (that->IsUnion()) { - UnionedHandle unioned = that->AsUnion(); - for (int i = 0; i < Config::union_length(unioned); ++i) { - TypeHandle that_i = Config::union_get(unioned, i); - if (this->Is(that_i)) return true; + UnionHandle unioned = handle(that->AsUnion()); + for (int i = 0; i < unioned->Length(); ++i) { + if (this->Is(unioned->Get(i))) return true; if (this->IsBitset()) break; // Fast fail, only first field is a bitset. } return false; @@ -326,51 +343,81 @@ bool TypeImpl<Config>::SlowIs(TypeImpl* that) { template<class Config> -bool TypeImpl<Config>::IsCurrently(TypeImpl* that) { - return this->Is(that) || - (this->IsConstant() && that->IsClass() && - this->AsConstant()->IsHeapObject() && - i::HeapObject::cast(*this->AsConstant())->map() == *that->AsClass()); +bool TypeImpl<Config>::NowIs(TypeImpl* that) { + DisallowHeapAllocation no_allocation; + + // TODO(rossberg): this is incorrect for + // Union(Constant(V), T)->NowIs(Class(M)) + // but fuzzing does not cover that! + if (this->IsConstant()) { + i::Object* object = *this->AsConstant()->Value(); + if (object->IsHeapObject()) { + i::Map* map = i::HeapObject::cast(object)->map(); + for (Iterator<i::Map> it = that->Classes(); !it.Done(); it.Advance()) { + if (*it.Current() == map) return true; + } + } + } + return this->Is(that); +} + + +// Check if this contains only (currently) stable classes. +template<class Config> +bool TypeImpl<Config>::NowStable() { + DisallowHeapAllocation no_allocation; + for (Iterator<i::Map> it = this->Classes(); !it.Done(); it.Advance()) { + if (!it.Current()->is_stable()) return false; + } + return true; } // Check this overlaps that. template<class Config> bool TypeImpl<Config>::Maybe(TypeImpl* that) { - // Fast path for bitsets. - if (this->IsBitset()) { - return IsInhabited(this->AsBitset() & that->LubBitset()); - } - if (that->IsBitset()) { - return IsInhabited(this->LubBitset() & that->AsBitset()); - } + DisallowHeapAllocation no_allocation; // (T1 \/ ... \/ Tn) overlaps T <=> (T1 overlaps T) \/ ... \/ (Tn overlaps T) if (this->IsUnion()) { - UnionedHandle unioned = this->AsUnion(); - for (int i = 0; i < Config::union_length(unioned); ++i) { - TypeHandle this_i = Config::union_get(unioned, i); - if (this_i->Maybe(that)) return true; + UnionHandle unioned = handle(this->AsUnion()); + for (int i = 0; i < unioned->Length(); ++i) { + if (unioned->Get(i)->Maybe(that)) return true; } return false; } // T overlaps (T1 \/ ... \/ Tn) <=> (T overlaps T1) \/ ... 
\/ (T overlaps Tn) if (that->IsUnion()) { - UnionedHandle unioned = that->AsUnion(); - for (int i = 0; i < Config::union_length(unioned); ++i) { - TypeHandle that_i = Config::union_get(unioned, i); - if (this->Maybe(that_i)) return true; + UnionHandle unioned = handle(that->AsUnion()); + for (int i = 0; i < unioned->Length(); ++i) { + if (this->Maybe(unioned->Get(i))) return true; } return false; } ASSERT(!this->IsUnion() && !that->IsUnion()); + if (this->IsBitset()) { + return BitsetType::IsInhabited(this->AsBitset() & that->BitsetLub()); + } + if (that->IsBitset()) { + return BitsetType::IsInhabited(this->BitsetLub() & that->AsBitset()); + } if (this->IsClass()) { - return that->IsClass() && *this->AsClass() == *that->AsClass(); + return that->IsClass() + && *this->AsClass()->Map() == *that->AsClass()->Map(); } if (this->IsConstant()) { - return that->IsConstant() && *this->AsConstant() == *that->AsConstant(); + return that->IsConstant() + && *this->AsConstant()->Value() == *that->AsConstant()->Value(); + } + if (this->IsArray()) { + // There is no variance! + return this->Equals(that); + } + if (this->IsFunction()) { + // There is no variance! + return this->Equals(that); } return false; @@ -378,56 +425,65 @@ bool TypeImpl<Config>::Maybe(TypeImpl* that) { template<class Config> -bool TypeImpl<Config>::InUnion(UnionedHandle unioned, int current_size) { +bool TypeImpl<Config>::Contains(i::Object* value) { + DisallowHeapAllocation no_allocation; + + for (Iterator<i::Object> it = this->Constants(); !it.Done(); it.Advance()) { + if (*it.Current() == value) return true; + } + return BitsetType::New(BitsetType::Lub(value))->Is(this); +} + + +template<class Config> +bool TypeImpl<Config>::InUnion(UnionHandle unioned, int current_size) { ASSERT(!this->IsUnion()); for (int i = 0; i < current_size; ++i) { - TypeHandle type = Config::union_get(unioned, i); - if (this->Is(type)) return true; + if (this->Is(unioned->Get(i))) return true; } return false; } -// Get non-bitsets from this which are not subsumed by union, store at unioned, +// Get non-bitsets from this which are not subsumed by union, store at result, // starting at index. Returns updated index. template<class Config> int TypeImpl<Config>::ExtendUnion( - UnionedHandle result, TypeHandle type, int current_size) { + UnionHandle result, TypeHandle type, int current_size) { int old_size = current_size; - if (type->IsClass() || type->IsConstant()) { - if (!type->InUnion(result, old_size)) { - Config::union_set(result, current_size++, type); - } - } else if (type->IsUnion()) { - UnionedHandle unioned = type->AsUnion(); - for (int i = 0; i < Config::union_length(unioned); ++i) { - TypeHandle type = Config::union_get(unioned, i); - ASSERT(i == 0 || - !(type->IsBitset() || type->Is(Config::union_get(unioned, 0)))); + if (type->IsUnion()) { + UnionHandle unioned = handle(type->AsUnion()); + for (int i = 0; i < unioned->Length(); ++i) { + TypeHandle type = unioned->Get(i); + ASSERT(i == 0 || !(type->IsBitset() || type->Is(unioned->Get(0)))); if (!type->IsBitset() && !type->InUnion(result, old_size)) { - Config::union_set(result, current_size++, type); + result->Set(current_size++, type); } } + } else if (!type->IsBitset()) { + // For all structural types, subtyping implies equivalence. 
+ ASSERT(type->IsClass() || type->IsConstant() || + type->IsArray() || type->IsFunction()); + if (!type->InUnion(result, old_size)) { + result->Set(current_size++, type); + } } return current_size; } // Union is O(1) on simple bit unions, but O(n*m) on structured unions. -// TODO(rossberg): Should we use object sets somehow? Is it worth it? template<class Config> typename TypeImpl<Config>::TypeHandle TypeImpl<Config>::Union( TypeHandle type1, TypeHandle type2, Region* region) { // Fast case: bit sets. if (type1->IsBitset() && type2->IsBitset()) { - return Config::from_bitset(type1->AsBitset() | type2->AsBitset(), region); + return BitsetType::New(type1->AsBitset() | type2->AsBitset(), region); } // Fast case: top or bottom types. - if (type1->IsAny()) return type1; - if (type2->IsAny()) return type2; - if (type1->IsNone()) return type2; - if (type2->IsNone()) return type1; + if (type1->IsAny() || type2->IsNone()) return type1; + if (type2->IsAny() || type1->IsNone()) return type2; // Semi-fast case: Unioned objects are neither involved nor produced. if (!(type1->IsUnion() || type2->IsUnion())) { @@ -436,74 +492,74 @@ typename TypeImpl<Config>::TypeHandle TypeImpl<Config>::Union( } // Slow case: may need to produce a Unioned object. - int size = type1->IsBitset() || type2->IsBitset() ? 1 : 0; + int size = 0; if (!type1->IsBitset()) { - size += (type1->IsUnion() ? Config::union_length(type1->AsUnion()) : 1); + size += (type1->IsUnion() ? type1->AsUnion()->Length() : 1); } if (!type2->IsBitset()) { - size += (type2->IsUnion() ? Config::union_length(type2->AsUnion()) : 1); + size += (type2->IsUnion() ? type2->AsUnion()->Length() : 1); } - ASSERT(size >= 2); - UnionedHandle unioned = Config::union_create(size, region); - size = 0; + int bitset = type1->BitsetGlb() | type2->BitsetGlb(); + if (bitset != BitsetType::kNone) ++size; + ASSERT(size >= 1); - int bitset = type1->GlbBitset() | type2->GlbBitset(); - if (bitset != kNone) { - Config::union_set(unioned, size++, Config::from_bitset(bitset, region)); + UnionHandle unioned = UnionType::New(size, region); + size = 0; + if (bitset != BitsetType::kNone) { + unioned->Set(size++, BitsetType::New(bitset, region)); } size = ExtendUnion(unioned, type1, size); size = ExtendUnion(unioned, type2, size); if (size == 1) { - return Config::union_get(unioned, 0); + return unioned->Get(0); } else { - Config::union_shrink(unioned, size); - return Config::from_union(unioned); + unioned->Shrink(size); + return unioned; } } -// Get non-bitsets from type which are also in other, store at unioned, +// Get non-bitsets from type which are also in other, store at result, // starting at index. Returns updated index. 
template<class Config> int TypeImpl<Config>::ExtendIntersection( - UnionedHandle result, TypeHandle type, TypeHandle other, int current_size) { + UnionHandle result, TypeHandle type, TypeHandle other, int current_size) { int old_size = current_size; - if (type->IsClass() || type->IsConstant()) { - if (type->Is(other) && !type->InUnion(result, old_size)) { - Config::union_set(result, current_size++, type); - } - } else if (type->IsUnion()) { - UnionedHandle unioned = type->AsUnion(); - for (int i = 0; i < Config::union_length(unioned); ++i) { - TypeHandle type = Config::union_get(unioned, i); - ASSERT(i == 0 || - !(type->IsBitset() || type->Is(Config::union_get(unioned, 0)))); + if (type->IsUnion()) { + UnionHandle unioned = handle(type->AsUnion()); + for (int i = 0; i < unioned->Length(); ++i) { + TypeHandle type = unioned->Get(i); + ASSERT(i == 0 || !(type->IsBitset() || type->Is(unioned->Get(0)))); if (!type->IsBitset() && type->Is(other) && !type->InUnion(result, old_size)) { - Config::union_set(result, current_size++, type); + result->Set(current_size++, type); } } + } else if (!type->IsBitset()) { + // For all structural types, subtyping implies equivalence. + ASSERT(type->IsClass() || type->IsConstant() || + type->IsArray() || type->IsFunction()); + if (type->Is(other) && !type->InUnion(result, old_size)) { + result->Set(current_size++, type); + } } return current_size; } // Intersection is O(1) on simple bit unions, but O(n*m) on structured unions. -// TODO(rossberg): Should we use object sets somehow? Is it worth it? template<class Config> typename TypeImpl<Config>::TypeHandle TypeImpl<Config>::Intersect( TypeHandle type1, TypeHandle type2, Region* region) { // Fast case: bit sets. if (type1->IsBitset() && type2->IsBitset()) { - return Config::from_bitset(type1->AsBitset() & type2->AsBitset(), region); + return BitsetType::New(type1->AsBitset() & type2->AsBitset(), region); } // Fast case: top or bottom types. - if (type1->IsNone()) return type1; - if (type2->IsNone()) return type2; - if (type1->IsAny()) return type2; - if (type2->IsAny()) return type1; + if (type1->IsNone() || type2->IsAny()) return type1; + if (type2->IsNone() || type1->IsAny()) return type2; // Semi-fast case: Unioned objects are neither involved nor produced. if (!(type1->IsUnion() || type2->IsUnion())) { @@ -514,19 +570,19 @@ typename TypeImpl<Config>::TypeHandle TypeImpl<Config>::Intersect( // Slow case: may need to produce a Unioned object. int size = 0; if (!type1->IsBitset()) { - size = (type1->IsUnion() ? Config::union_length(type1->AsUnion()) : 2); + size += (type1->IsUnion() ? type1->AsUnion()->Length() : 1); } if (!type2->IsBitset()) { - int size2 = (type2->IsUnion() ? Config::union_length(type2->AsUnion()) : 2); - size = (size == 0 ? size2 : Min(size, size2)); + size += (type2->IsUnion() ? 
type2->AsUnion()->Length() : 1); } - ASSERT(size >= 2); - UnionedHandle unioned = Config::union_create(size, region); - size = 0; + int bitset = type1->BitsetGlb() & type2->BitsetGlb(); + if (bitset != BitsetType::kNone) ++size; + ASSERT(size >= 1); - int bitset = type1->GlbBitset() & type2->GlbBitset(); - if (bitset != kNone) { - Config::union_set(unioned, size++, Config::from_bitset(bitset, region)); + UnionHandle unioned = UnionType::New(size, region); + size = 0; + if (bitset != BitsetType::kNone) { + unioned->Set(size++, BitsetType::New(bitset, region)); } size = ExtendIntersection(unioned, type1, type2, size); size = ExtendIntersection(unioned, type2, type1, size); @@ -534,10 +590,10 @@ typename TypeImpl<Config>::TypeHandle TypeImpl<Config>::Intersect( if (size == 0) { return None(region); } else if (size == 1) { - return Config::union_get(unioned, 0); + return unioned->Get(0); } else { - Config::union_shrink(unioned, size); - return Config::from_union(unioned); + unioned->Shrink(size); + return unioned; } } @@ -547,27 +603,41 @@ template<class OtherType> typename TypeImpl<Config>::TypeHandle TypeImpl<Config>::Convert( typename OtherType::TypeHandle type, Region* region) { if (type->IsBitset()) { - return Config::from_bitset(type->AsBitset(), region); + return BitsetType::New(type->AsBitset(), region); } else if (type->IsClass()) { - return Config::from_class(type->AsClass(), type->LubBitset(), region); + return ClassType::New(type->AsClass()->Map(), region); } else if (type->IsConstant()) { - return Config::from_constant(type->AsConstant(), type->LubBitset(), region); - } else { - ASSERT(type->IsUnion()); - typename OtherType::UnionedHandle unioned = type->AsUnion(); - int length = OtherType::UnionLength(unioned); - UnionedHandle new_unioned = Config::union_create(length, region); + return ConstantType::New(type->AsConstant()->Value(), region); + } else if (type->IsUnion()) { + int length = type->AsUnion()->Length(); + UnionHandle unioned = UnionType::New(length, region); for (int i = 0; i < length; ++i) { - Config::union_set(new_unioned, i, - Convert<OtherType>(OtherType::UnionGet(unioned, i), region)); + unioned->Set(i, Convert<OtherType>(type->AsUnion()->Get(i), region)); } - return Config::from_union(new_unioned); + return unioned; + } else if (type->IsArray()) { + return ArrayType::New( + Convert<OtherType>(type->AsArray()->Element(), region), region); + } else if (type->IsFunction()) { + FunctionHandle function = FunctionType::New( + Convert<OtherType>(type->AsFunction()->Result(), region), + Convert<OtherType>(type->AsFunction()->Receiver(), region), + type->AsFunction()->Arity(), region); + for (int i = 0; i < function->Arity(); ++i) { + function->InitParameter(i, + Convert<OtherType>(type->AsFunction()->Parameter(i), region)); + } + return function; + } else { + UNREACHABLE(); + return None(region); } } // TODO(rossberg): this does not belong here. 
Representation Representation::FromType(Type* type) { + DisallowHeapAllocation no_allocation; if (type->Is(Type::None())) return Representation::None(); if (type->Is(Type::SignedSmall())) return Representation::Smi(); if (type->Is(Type::Signed32())) return Representation::Integer32(); @@ -576,7 +646,6 @@ Representation Representation::FromType(Type* type) { } -#ifdef OBJECT_PRINT template<class Config> void TypeImpl<Config>::TypePrint(PrintDimension dim) { TypePrint(stdout, dim); @@ -586,7 +655,7 @@ void TypeImpl<Config>::TypePrint(PrintDimension dim) { template<class Config> -const char* TypeImpl<Config>::bitset_name(int bitset) { +const char* TypeImpl<Config>::BitsetType::Name(int bitset) { switch (bitset) { case kAny & kRepresentation: return "Any"; #define PRINT_COMPOSED_TYPE(type, value) \ @@ -606,8 +675,9 @@ const char* TypeImpl<Config>::bitset_name(int bitset) { template<class Config> -void TypeImpl<Config>::BitsetTypePrint(FILE* out, int bitset) { - const char* name = bitset_name(bitset); +void TypeImpl<Config>::BitsetType::BitsetTypePrint(FILE* out, int bitset) { + DisallowHeapAllocation no_allocation; + const char* name = Name(bitset); if (name != NULL) { PrintF(out, "%s", name); } else { @@ -628,7 +698,7 @@ void TypeImpl<Config>::BitsetTypePrint(FILE* out, int bitset) { if ((bitset & subset) == subset) { if (!is_first) PrintF(out, " | "); is_first = false; - PrintF(out, "%s", bitset_name(subset)); + PrintF(out, "%s", Name(subset)); bitset -= subset; } } @@ -640,41 +710,60 @@ void TypeImpl<Config>::BitsetTypePrint(FILE* out, int bitset) { template<class Config> void TypeImpl<Config>::TypePrint(FILE* out, PrintDimension dim) { + DisallowHeapAllocation no_allocation; if (this->IsBitset()) { int bitset = this->AsBitset(); switch (dim) { case BOTH_DIMS: - BitsetTypePrint(out, bitset & kSemantic); - PrintF("/"); - BitsetTypePrint(out, bitset & kRepresentation); + BitsetType::BitsetTypePrint(out, bitset & BitsetType::kSemantic); + PrintF(out, "/"); + BitsetType::BitsetTypePrint(out, bitset & BitsetType::kRepresentation); break; case SEMANTIC_DIM: - BitsetTypePrint(out, bitset & kSemantic); + BitsetType::BitsetTypePrint(out, bitset & BitsetType::kSemantic); break; case REPRESENTATION_DIM: - BitsetTypePrint(out, bitset & kRepresentation); + BitsetType::BitsetTypePrint(out, bitset & BitsetType::kRepresentation); break; } } else if (this->IsConstant()) { - PrintF(out, "Constant(%p : ", static_cast<void*>(*this->AsConstant())); - Config::from_bitset(this->LubBitset())->TypePrint(out); - PrintF(")"); + PrintF(out, "Constant(%p : ", + static_cast<void*>(*this->AsConstant()->Value())); + BitsetType::New(BitsetType::Lub(this))->TypePrint(out, dim); + PrintF(out, ")"); } else if (this->IsClass()) { - PrintF(out, "Class(%p < ", static_cast<void*>(*this->AsClass())); - Config::from_bitset(this->LubBitset())->TypePrint(out); - PrintF(")"); + PrintF(out, "Class(%p < ", static_cast<void*>(*this->AsClass()->Map())); + BitsetType::New(BitsetType::Lub(this))->TypePrint(out, dim); + PrintF(out, ")"); } else if (this->IsUnion()) { PrintF(out, "("); - UnionedHandle unioned = this->AsUnion(); - for (int i = 0; i < Config::union_length(unioned); ++i) { - TypeHandle type_i = Config::union_get(unioned, i); + UnionHandle unioned = handle(this->AsUnion()); + for (int i = 0; i < unioned->Length(); ++i) { + TypeHandle type_i = unioned->Get(i); if (i > 0) PrintF(out, " | "); - type_i->TypePrint(out); + type_i->TypePrint(out, dim); } PrintF(out, ")"); + } else if (this->IsArray()) { + PrintF(out, "["); + 
AsArray()->Element()->TypePrint(out, dim); + PrintF(out, "]"); + } else if (this->IsFunction()) { + if (!this->AsFunction()->Receiver()->IsAny()) { + this->AsFunction()->Receiver()->TypePrint(out, dim); + PrintF(out, "."); + } + PrintF(out, "("); + for (int i = 0; i < this->AsFunction()->Arity(); ++i) { + if (i > 0) PrintF(out, ", "); + this->AsFunction()->Parameter(i)->TypePrint(out, dim); + } + PrintF(out, ")->"); + this->AsFunction()->Result()->TypePrint(out, dim); + } else { + UNREACHABLE(); } } -#endif template class TypeImpl<ZoneTypeConfig>; @@ -692,5 +781,4 @@ template TypeImpl<HeapTypeConfig>::TypeHandle TypeImpl<HeapTypeConfig>::Convert<Type>( TypeImpl<ZoneTypeConfig>::TypeHandle, TypeImpl<HeapTypeConfig>::Region*); - } } // namespace v8::internal diff --git a/deps/v8/src/types.h b/deps/v8/src/types.h index 4569d131b..5ca3a8145 100644 --- a/deps/v8/src/types.h +++ b/deps/v8/src/types.h @@ -1,41 +1,17 @@ -// Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_TYPES_H_ #define V8_TYPES_H_ -#include "v8.h" - -#include "objects.h" +#include "handles.h" namespace v8 { namespace internal { - +// SUMMARY +// // A simple type system for compiler-internal use. It is based entirely on // union types, and all subtyping hence amounts to set inclusion. Besides the // obvious primitive types and some predefined unions, the type language also @@ -45,12 +21,13 @@ namespace internal { // Types consist of two dimensions: semantic (value range) and representation. // Both are related through subtyping. 
// +// SEMANTIC DIMENSION +// // The following equations and inequations hold for the semantic axis: // // None <= T // T <= Any // -// Oddball = Boolean \/ Null \/ Undefined // Number = Signed32 \/ Unsigned32 \/ Double // Smi <= Signed32 // Name = String \/ Symbol @@ -66,11 +43,19 @@ namespace internal { // // Class(map) < T iff instance_type(map) < T // Constant(x) < T iff instance_type(map(x)) < T +// Array(T) < Array +// Function(R, S, T0, T1, ...) < Function // +// Both structural Array and Function types are invariant in all parameters. +// Relaxing this would make Union and Intersect operations more involved. // Note that Constant(x) < Class(map(x)) does _not_ hold, since x's map can // change! (Its instance type cannot, however.) // TODO(rossberg): the latter is not currently true for proxies, because of fix, // but will hold once we implement direct proxies. +// However, we also define a 'temporal' variant of the subtyping relation that +// considers the _current_ state only, i.e., Constant(x) <_now Class(map(x)). +// +// REPRESENTATIONAL DIMENSION // // For the representation axis, the following holds: // @@ -96,6 +81,8 @@ namespace internal { // SignedSmall /\ TaggedInt (a 'smi') // Number /\ TaggedPtr (a heap number) // +// PREDICATES +// // There are two main functions for testing types: // // T1->Is(T2) -- tests whether T1 is included in T2 (i.e., T1 <= T2) @@ -109,7 +96,23 @@ namespace internal { // lattice. That is intentional. It should always be possible to refine the // lattice (e.g., splitting up number types further) without invalidating any // existing assumptions or tests. -// Consequently, do not use pointer equality for type tests, always use Is! +// Consequently, do not normally use Equals for type tests, always use Is! +// +// The NowIs operator implements state-sensitive subtying, as described above. +// Any compilation decision based on such temporary properties requires runtime +// guarding! +// +// PROPERTIES +// +// Various formal properties hold for constructors, operators, and predicates +// over types. For example, constructors are injective, subtyping is a complete +// partial order, union and intersection satisfy the usual algebraic properties. +// +// See test/cctest/test-types.cc for a comprehensive executable specification, +// especially with respect to the properties of the more exotic 'temporal' +// constructors and predicates (those prefixed 'Now'). +// +// IMPLEMENTATION // // Internally, all 'primitive' types, and their unions, are represented as // bitsets. Class is a heap pointer to the respective map. 
Only Constant's, or @@ -168,7 +171,6 @@ namespace internal { V(Proxy, 1 << 15 | REPRESENTATION(kTaggedPtr)) \ V(Internal, 1 << 16 | REPRESENTATION(kTagged | kUntagged)) \ \ - V(Oddball, kBoolean | kNull | kUndefined) \ V(Signed32, kSignedSmall | kOtherSigned32) \ V(Number, kSigned32 | kUnsigned32 | kFloat) \ V(String, kInternalizedString | kOtherString) \ @@ -180,8 +182,9 @@ namespace internal { V(Detectable, kDetectableReceiver | kNumber | kName) \ V(Object, kDetectableObject | kUndetectable) \ V(Receiver, kObject | kProxy) \ - V(NonNumber, kOddball | kName | kReceiver | kInternal) \ - V(Any, kNumber | kNonNumber) + V(NonNumber, kBoolean | kName | kNull | kReceiver | \ + kUndefined | kInternal) \ + V(Any, -1) #define BITSET_TYPE_LIST(V) \ MASK_BITSET_TYPE_LIST(V) \ @@ -190,103 +193,152 @@ namespace internal { // struct Config { +// typedef TypeImpl<Config> Type; // typedef Base; -// typedef Unioned; +// typedef Struct; // typedef Region; // template<class> struct Handle { typedef type; } // No template typedefs... -// static Handle<Type>::type handle(Type* type); // !is_bitset(type) +// template<class T> static Handle<T>::type handle(T* t); // !is_bitset(t) +// template<class T> static Handle<T>::type cast(Handle<Type>::type); // static bool is_bitset(Type*); // static bool is_class(Type*); // static bool is_constant(Type*); -// static bool is_union(Type*); +// static bool is_struct(Type*, int tag); // static int as_bitset(Type*); // static i::Handle<i::Map> as_class(Type*); // static i::Handle<i::Object> as_constant(Type*); -// static Handle<Unioned>::type as_union(Type*); +// static Handle<Struct>::type as_struct(Type*); // static Type* from_bitset(int bitset); // static Handle<Type>::type from_bitset(int bitset, Region*); // static Handle<Type>::type from_class(i::Handle<Map>, int lub, Region*); // static Handle<Type>::type from_constant(i::Handle<Object>, int, Region*); -// static Handle<Type>::type from_union(Handle<Unioned>::type); -// static Handle<Unioned>::type union_create(int size, Region*); -// static void union_shrink(Handle<Unioned>::type, int size); -// static Handle<Type>::type union_get(Handle<Unioned>::type, int); -// static void union_set(Handle<Unioned>::type, int, Handle<Type>::type); -// static int union_length(Handle<Unioned>::type); +// static Handle<Type>::type from_struct(Handle<Struct>::type, int tag); +// static Handle<Struct>::type struct_create(int tag, int length, Region*); +// static void struct_shrink(Handle<Struct>::type, int length); +// static int struct_tag(Handle<Struct>::type); +// static int struct_length(Handle<Struct>::type); +// static Handle<Type>::type struct_get(Handle<Struct>::type, int); +// static void struct_set(Handle<Struct>::type, int, Handle<Type>::type); // static int lub_bitset(Type*); // } template<class Config> class TypeImpl : public Config::Base { public: + class BitsetType; // Internal + class StructuralType; // Internal + class UnionType; // Internal + + class ClassType; + class ConstantType; + class ArrayType; + class FunctionType; + typedef typename Config::template Handle<TypeImpl>::type TypeHandle; + typedef typename Config::template Handle<ClassType>::type ClassHandle; + typedef typename Config::template Handle<ConstantType>::type ConstantHandle; + typedef typename Config::template Handle<ArrayType>::type ArrayHandle; + typedef typename Config::template Handle<FunctionType>::type FunctionHandle; + typedef typename Config::template Handle<UnionType>::type UnionHandle; typedef typename Config::Region Region; - #define 
DEFINE_TYPE_CONSTRUCTOR(type, value) \ - static TypeImpl* type() { return Config::from_bitset(k##type); } \ - static TypeHandle type(Region* region) { \ - return Config::from_bitset(k##type, region); \ + #define DEFINE_TYPE_CONSTRUCTOR(type, value) \ + static TypeImpl* type() { return BitsetType::New(BitsetType::k##type); } \ + static TypeHandle type(Region* region) { \ + return BitsetType::New(BitsetType::k##type, region); \ } BITSET_TYPE_LIST(DEFINE_TYPE_CONSTRUCTOR) #undef DEFINE_TYPE_CONSTRUCTOR static TypeHandle Class(i::Handle<i::Map> map, Region* region) { - return Config::from_class(map, LubBitset(*map), region); + return ClassType::New(map, region); } static TypeHandle Constant(i::Handle<i::Object> value, Region* region) { - return Config::from_constant(value, LubBitset(*value), region); + return ConstantType::New(value, region); + } + static TypeHandle Array(TypeHandle element, Region* region) { + return ArrayType::New(element, region); + } + static FunctionHandle Function( + TypeHandle result, TypeHandle receiver, int arity, Region* region) { + return FunctionType::New(result, receiver, arity, region); + } + static TypeHandle Function(TypeHandle result, Region* region) { + return Function(result, Any(region), 0, region); + } + static TypeHandle Function( + TypeHandle result, TypeHandle param0, Region* region) { + FunctionHandle function = Function(result, Any(region), 1, region); + function->InitParameter(0, param0); + return function; + } + static TypeHandle Function( + TypeHandle result, TypeHandle param0, TypeHandle param1, Region* region) { + FunctionHandle function = Function(result, Any(region), 2, region); + function->InitParameter(0, param0); + function->InitParameter(1, param1); + return function; } static TypeHandle Union(TypeHandle type1, TypeHandle type2, Region* reg); static TypeHandle Intersect(TypeHandle type1, TypeHandle type2, Region* reg); + static TypeHandle Of(i::Object* value, Region* region) { + return Config::from_bitset(BitsetType::Lub(value), region); + } static TypeHandle Of(i::Handle<i::Object> value, Region* region) { - return Config::from_bitset(LubBitset(*value), region); + return Of(*value, region); + } + + bool IsInhabited() { + return !this->IsBitset() || BitsetType::IsInhabited(this->AsBitset()); } bool Is(TypeImpl* that) { return this == that || this->SlowIs(that); } template<class TypeHandle> bool Is(TypeHandle that) { return this->Is(*that); } + bool Maybe(TypeImpl* that); template<class TypeHandle> bool Maybe(TypeHandle that) { return this->Maybe(*that); } + bool Equals(TypeImpl* that) { return this->Is(that) && that->Is(this); } + template<class TypeHandle> + bool Equals(TypeHandle that) { return this->Equals(*that); } + + // Equivalent to Constant(value)->Is(this), but avoiding allocation. + bool Contains(i::Object* val); + bool Contains(i::Handle<i::Object> val) { return this->Contains(*val); } + // State-dependent versions of Of and Is that consider subtyping between // a constant and its map class. 
- static TypeHandle OfCurrently(i::Handle<i::Object> value, Region* region); - bool IsCurrently(TypeImpl* that); + static TypeHandle NowOf(i::Object* value, Region* region); + static TypeHandle NowOf(i::Handle<i::Object> value, Region* region) { + return NowOf(*value, region); + } + bool NowIs(TypeImpl* that); template<class TypeHandle> - bool IsCurrently(TypeHandle that) { return this->IsCurrently(*that); } + bool NowIs(TypeHandle that) { return this->NowIs(*that); } + inline bool NowContains(i::Object* val); + bool NowContains(i::Handle<i::Object> val) { return this->NowContains(*val); } + + bool NowStable(); bool IsClass() { return Config::is_class(this); } bool IsConstant() { return Config::is_constant(this); } - i::Handle<i::Map> AsClass() { return Config::as_class(this); } - i::Handle<i::Object> AsConstant() { return Config::as_constant(this); } + bool IsArray() { return Config::is_struct(this, StructuralType::kArrayTag); } + bool IsFunction() { + return Config::is_struct(this, StructuralType::kFunctionTag); + } + + ClassType* AsClass() { return ClassType::cast(this); } + ConstantType* AsConstant() { return ConstantType::cast(this); } + ArrayType* AsArray() { return ArrayType::cast(this); } + FunctionType* AsFunction() { return FunctionType::cast(this); } int NumClasses(); int NumConstants(); - template<class T> - class Iterator { - public: - bool Done() const { return index_ < 0; } - i::Handle<T> Current(); - void Advance(); - - private: - template<class> friend class TypeImpl; - - Iterator() : index_(-1) {} - explicit Iterator(TypeHandle type) : type_(type), index_(-1) { - Advance(); - } - - inline bool matches(TypeHandle type); - inline TypeHandle get_type(); - - TypeHandle type_; - int index_; - }; - + template<class T> class Iterator; Iterator<i::Map> Classes() { if (this->IsBitset()) return Iterator<i::Map>(); return Iterator<i::Map>(Config::handle(this)); @@ -296,32 +348,53 @@ class TypeImpl : public Config::Base { return Iterator<i::Object>(Config::handle(this)); } - static TypeImpl* cast(typename Config::Base* object) { - TypeImpl* t = static_cast<TypeImpl*>(object); - ASSERT(t->IsBitset() || t->IsClass() || t->IsConstant() || t->IsUnion()); - return t; - } + static inline TypeImpl* cast(typename Config::Base* object); template<class OtherTypeImpl> static TypeHandle Convert( typename OtherTypeImpl::TypeHandle type, Region* region); -#ifdef OBJECT_PRINT enum PrintDimension { BOTH_DIMS, SEMANTIC_DIM, REPRESENTATION_DIM }; void TypePrint(PrintDimension = BOTH_DIMS); void TypePrint(FILE* out, PrintDimension = BOTH_DIMS); -#endif - private: + protected: template<class> friend class Iterator; template<class> friend class TypeImpl; - // A union is a fixed array containing types. 
Invariants: - // - its length is at least 2 - // - at most one field is a bitset, and it must go into index 0 - // - no field is a union - typedef typename Config::Unioned Unioned; - typedef typename Config::template Handle<Unioned>::type UnionedHandle; + template<class T> + static typename Config::template Handle<T>::type handle(T* type) { + return Config::handle(type); + } + + bool IsNone() { return this == None(); } + bool IsAny() { return this == Any(); } + bool IsBitset() { return Config::is_bitset(this); } + bool IsUnion() { return Config::is_struct(this, StructuralType::kUnionTag); } + + int AsBitset() { + ASSERT(this->IsBitset()); + return static_cast<BitsetType*>(this)->Bitset(); + } + UnionType* AsUnion() { return UnionType::cast(this); } + + bool SlowIs(TypeImpl* that); + + bool InUnion(UnionHandle unioned, int current_size); + static int ExtendUnion( + UnionHandle unioned, TypeHandle t, int current_size); + static int ExtendIntersection( + UnionHandle unioned, TypeHandle t, TypeHandle other, int current_size); + + int BitsetGlb() { return BitsetType::Glb(this); } + int BitsetLub() { return BitsetType::Lub(this); } +}; + + +template<class Config> +class TypeImpl<Config>::BitsetType : public TypeImpl<Config> { + private: + friend class TypeImpl<Config>; enum { #define DECLARE_TYPE(type, value) k##type = (value), @@ -330,260 +403,271 @@ class TypeImpl : public Config::Base { kUnusedEOL = 0 }; - bool IsNone() { return this == None(); } - bool IsAny() { return this == Any(); } - bool IsBitset() { return Config::is_bitset(this); } - bool IsUnion() { return Config::is_union(this); } - int AsBitset() { return Config::as_bitset(this); } - UnionedHandle AsUnion() { return Config::as_union(this); } + int Bitset() { return Config::as_bitset(this); } - static int UnionLength(UnionedHandle unioned) { - return Config::union_length(unioned); + static BitsetType* New(int bitset) { + return static_cast<BitsetType*>(Config::from_bitset(bitset)); } - static TypeHandle UnionGet(UnionedHandle unioned, int i) { - return Config::union_get(unioned, i); + static TypeHandle New(int bitset, Region* region) { + return Config::from_bitset(bitset, region); } - bool SlowIs(TypeImpl* that); - static bool IsInhabited(int bitset) { return (bitset & kRepresentation) && (bitset & kSemantic); } - int LubBitset(); // least upper bound that's a bitset - int GlbBitset(); // greatest lower bound that's a bitset - - static int LubBitset(i::Object* value); - static int LubBitset(i::Map* map); - - bool InUnion(UnionedHandle unioned, int current_size); - static int ExtendUnion( - UnionedHandle unioned, TypeHandle t, int current_size); - static int ExtendIntersection( - UnionedHandle unioned, TypeHandle t, TypeHandle other, int current_size); + static int Glb(TypeImpl* type); // greatest lower bound that's a bitset + static int Lub(TypeImpl* type); // least upper bound that's a bitset + static int Lub(i::Object* value); + static int Lub(i::Map* map); -#ifdef OBJECT_PRINT - static const char* bitset_name(int bitset); + static const char* Name(int bitset); static void BitsetTypePrint(FILE* out, int bitset); -#endif }; -// Zone-allocated types are either (odd) integers to represent bitsets, or -// (even) pointers to zone lists for everything else. The first slot of every -// list is an explicit tag value to distinguish representation. -struct ZoneTypeConfig { - private: - typedef i::ZoneList<void*> Tagged; +// Internal +// A structured type contains a tag and a variable number of type fields. 
+template<class Config> +class TypeImpl<Config>::StructuralType : public TypeImpl<Config> { + protected: + template<class> friend class TypeImpl; + friend struct ZoneTypeConfig; // For tags. + friend struct HeapTypeConfig; enum Tag { kClassTag, kConstantTag, + kArrayTag, + kFunctionTag, kUnionTag }; - static Tagged* tagged_create(Tag tag, int size, Zone* zone) { - Tagged* tagged = new(zone) Tagged(size + 1, zone); - tagged->Add(reinterpret_cast<void*>(tag), zone); - tagged->AddBlock(NULL, size, zone); - return tagged; + int Length() { + return Config::struct_length(Config::as_struct(this)); } - static void tagged_shrink(Tagged* tagged, int size) { - tagged->Rewind(size + 1); + TypeHandle Get(int i) { + return Config::struct_get(Config::as_struct(this), i); } - static Tag tagged_tag(Tagged* tagged) { - return static_cast<Tag>(reinterpret_cast<intptr_t>(tagged->at(0))); + void Set(int i, TypeHandle type) { + Config::struct_set(Config::as_struct(this), i, type); } - template<class T> - static T tagged_get(Tagged* tagged, int i) { - return reinterpret_cast<T>(tagged->at(i + 1)); - } - template<class T> - static void tagged_set(Tagged* tagged, int i, T value) { - tagged->at(i + 1) = reinterpret_cast<void*>(value); + void Shrink(int length) { + Config::struct_shrink(Config::as_struct(this), length); } - static int tagged_length(Tagged* tagged) { - return tagged->length() - 1; + + static TypeHandle New(Tag tag, int length, Region* region) { + return Config::from_struct(Config::struct_create(tag, length, region)); } +}; - public: - typedef TypeImpl<ZoneTypeConfig> Type; - class Base {}; - typedef i::ZoneList<Type*> Unioned; - typedef i::Zone Region; - template<class T> struct Handle { typedef T* type; }; - static Type* handle(Type* type) { return type; } +template<class Config> +class TypeImpl<Config>::ClassType : public TypeImpl<Config> { + public: + i::Handle<i::Map> Map() { return Config::as_class(this); } - static bool is(Type* type, Tag tag) { - return is_tagged(type) && tagged_tag(as_tagged(type)) == tag; + static ClassHandle New(i::Handle<i::Map> map, Region* region) { + return Config::template cast<ClassType>( + Config::from_class(map, BitsetType::Lub(*map), region)); } - static bool is_bitset(Type* type) { - return reinterpret_cast<intptr_t>(type) & 1; - } - static bool is_tagged(Type* type) { return !is_bitset(type); } - static bool is_class(Type* type) { return is(type, kClassTag); } - static bool is_constant(Type* type) { return is(type, kConstantTag); } - static bool is_union(Type* type) { return is(type, kUnionTag); } - static bool tagged_is_union(Tagged* tagged) { - return is(from_tagged(tagged), kUnionTag); + static ClassType* cast(TypeImpl* type) { + ASSERT(type->IsClass()); + return static_cast<ClassType*>(type); } +}; - static int as_bitset(Type* type) { - ASSERT(is_bitset(type)); - return static_cast<int>(reinterpret_cast<intptr_t>(type) >> 1); - } - static Tagged* as_tagged(Type* type) { - ASSERT(is_tagged(type)); - return reinterpret_cast<Tagged*>(type); - } - static i::Handle<i::Map> as_class(Type* type) { - ASSERT(is_class(type)); - return i::Handle<i::Map>(tagged_get<i::Map**>(as_tagged(type), 1)); - } - static i::Handle<i::Object> as_constant(Type* type) { - ASSERT(is_constant(type)); - return i::Handle<i::Object>(tagged_get<i::Object**>(as_tagged(type), 1)); - } - static Unioned* as_union(Type* type) { - ASSERT(is_union(type)); - return tagged_as_union(as_tagged(type)); - } - static Unioned* tagged_as_union(Tagged* tagged) { - ASSERT(tagged_is_union(tagged)); - return 
reinterpret_cast<Unioned*>(tagged); - } - static Type* from_bitset(int bitset) { - return reinterpret_cast<Type*>((bitset << 1) | 1); - } - static Type* from_bitset(int bitset, Zone* Zone) { - return from_bitset(bitset); - } - static Type* from_tagged(Tagged* tagged) { - return reinterpret_cast<Type*>(tagged); - } - static Type* from_class(i::Handle<i::Map> map, int lub, Zone* zone) { - Tagged* tagged = tagged_create(kClassTag, 2, zone); - tagged_set(tagged, 0, lub); - tagged_set(tagged, 1, map.location()); - return from_tagged(tagged); +template<class Config> +class TypeImpl<Config>::ConstantType : public TypeImpl<Config> { + public: + i::Handle<i::Object> Value() { return Config::as_constant(this); } + + static ConstantHandle New(i::Handle<i::Object> value, Region* region) { + return Config::template cast<ConstantType>( + Config::from_constant(value, BitsetType::Lub(*value), region)); } - static Type* from_constant(i::Handle<i::Object> value, int lub, Zone* zone) { - Tagged* tagged = tagged_create(kConstantTag, 2, zone); - tagged_set(tagged, 0, lub); - tagged_set(tagged, 1, value.location()); - return from_tagged(tagged); + + static ConstantType* cast(TypeImpl* type) { + ASSERT(type->IsConstant()); + return static_cast<ConstantType*>(type); } - static Type* from_union(Unioned* unioned) { - return from_tagged(tagged_from_union(unioned)); +}; + + +// Internal +// A union is a structured type with the following invariants: +// - its length is at least 2 +// - at most one field is a bitset, and it must go into index 0 +// - no field is a union +template<class Config> +class TypeImpl<Config>::UnionType : public StructuralType { + public: + static UnionHandle New(int length, Region* region) { + return Config::template cast<UnionType>( + StructuralType::New(StructuralType::kUnionTag, length, region)); } - static Tagged* tagged_from_union(Unioned* unioned) { - return reinterpret_cast<Tagged*>(unioned); + + static UnionType* cast(TypeImpl* type) { + ASSERT(type->IsUnion()); + return static_cast<UnionType*>(type); } +}; + + +template<class Config> +class TypeImpl<Config>::ArrayType : public StructuralType { + public: + TypeHandle Element() { return this->Get(0); } - static Unioned* union_create(int size, Zone* zone) { - return tagged_as_union(tagged_create(kUnionTag, size, zone)); + static ArrayHandle New(TypeHandle element, Region* region) { + ArrayHandle type = Config::template cast<ArrayType>( + StructuralType::New(StructuralType::kArrayTag, 1, region)); + type->Set(0, element); + return type; } - static void union_shrink(Unioned* unioned, int size) { - tagged_shrink(tagged_from_union(unioned), size); + + static ArrayType* cast(TypeImpl* type) { + ASSERT(type->IsArray()); + return static_cast<ArrayType*>(type); } - static Type* union_get(Unioned* unioned, int i) { - Type* type = tagged_get<Type*>(tagged_from_union(unioned), i); - ASSERT(!is_union(type)); +}; + + +template<class Config> +class TypeImpl<Config>::FunctionType : public StructuralType { + public: + int Arity() { return this->Length() - 2; } + TypeHandle Result() { return this->Get(0); } + TypeHandle Receiver() { return this->Get(1); } + TypeHandle Parameter(int i) { return this->Get(2 + i); } + + void InitParameter(int i, TypeHandle type) { this->Set(2 + i, type); } + + static FunctionHandle New( + TypeHandle result, TypeHandle receiver, int arity, Region* region) { + FunctionHandle type = Config::template cast<FunctionType>( + StructuralType::New(StructuralType::kFunctionTag, 2 + arity, region)); + type->Set(0, result); + 
type->Set(1, receiver); return type; } - static void union_set(Unioned* unioned, int i, Type* type) { - ASSERT(!is_union(type)); - tagged_set(tagged_from_union(unioned), i, type); - } - static int union_length(Unioned* unioned) { - return tagged_length(tagged_from_union(unioned)); + + static FunctionType* cast(TypeImpl* type) { + ASSERT(type->IsFunction()); + return static_cast<FunctionType*>(type); } - static int lub_bitset(Type* type) { - ASSERT(is_class(type) || is_constant(type)); - return static_cast<int>(tagged_get<intptr_t>(as_tagged(type), 0)); +}; + + +template<class Config> template<class T> +class TypeImpl<Config>::Iterator { + public: + bool Done() const { return index_ < 0; } + i::Handle<T> Current(); + void Advance(); + + private: + template<class> friend class TypeImpl; + + Iterator() : index_(-1) {} + explicit Iterator(TypeHandle type) : type_(type), index_(-1) { + Advance(); } + + inline bool matches(TypeHandle type); + inline TypeHandle get_type(); + + TypeHandle type_; + int index_; +}; + + +// Zone-allocated types are either (odd) integers to represent bitsets, or +// (even) pointers to structures for everything else. +struct ZoneTypeConfig { + typedef TypeImpl<ZoneTypeConfig> Type; + class Base {}; + typedef void* Struct; + typedef i::Zone Region; + template<class T> struct Handle { typedef T* type; }; + + template<class T> static inline T* handle(T* type); + template<class T> static inline T* cast(Type* type); + + static inline bool is_bitset(Type* type); + static inline bool is_class(Type* type); + static inline bool is_constant(Type* type); + static inline bool is_struct(Type* type, int tag); + + static inline int as_bitset(Type* type); + static inline Struct* as_struct(Type* type); + static inline i::Handle<i::Map> as_class(Type* type); + static inline i::Handle<i::Object> as_constant(Type* type); + + static inline Type* from_bitset(int bitset); + static inline Type* from_bitset(int bitset, Zone* zone); + static inline Type* from_struct(Struct* structured); + static inline Type* from_class(i::Handle<i::Map> map, int lub, Zone* zone); + static inline Type* from_constant( + i::Handle<i::Object> value, int lub, Zone* zone); + + static inline Struct* struct_create(int tag, int length, Zone* zone); + static inline void struct_shrink(Struct* structured, int length); + static inline int struct_tag(Struct* structured); + static inline int struct_length(Struct* structured); + static inline Type* struct_get(Struct* structured, int i); + static inline void struct_set(Struct* structured, int i, Type* type); + + static inline int lub_bitset(Type* type); }; +typedef TypeImpl<ZoneTypeConfig> Type; + // Heap-allocated types are either smis for bitsets, maps for classes, boxes for // constants, or fixed arrays for unions. 
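The ZoneTypeConfig comment above ("odd integers to represent bitsets, even pointers to structures") describes a classic tagged-pointer encoding: small bitset types live directly in the pointer value with the low bit set, while everything else is an ordinary (even-aligned) pointer. A minimal standalone sketch of that scheme, plain C++ rather than the V8 code itself:

    #include <cassert>
    #include <cstdint>

    struct Struct { int tag; };  // stand-in for a zone-allocated structured type

    // Encode a small integer bitset as an odd "pointer"; real structs stay even.
    inline void* FromBitset(int bitset) {
      return reinterpret_cast<void*>((static_cast<intptr_t>(bitset) << 1) | 1);
    }
    inline bool IsBitset(void* type) {
      return (reinterpret_cast<intptr_t>(type) & 1) != 0;
    }
    inline int AsBitset(void* type) {
      assert(IsBitset(type));
      return static_cast<int>(reinterpret_cast<intptr_t>(type) >> 1);
    }
    inline Struct* AsStruct(void* type) {
      assert(!IsBitset(type));
      return reinterpret_cast<Struct*>(type);
    }

    int main() {
      void* number = FromBitset(0x10);   // a bitset type, no allocation needed
      Struct cls = {1};
      void* klass = &cls;                // a structured type, ordinary pointer
      assert(IsBitset(number) && AsBitset(number) == 0x10);
      assert(!IsBitset(klass) && AsStruct(klass)->tag == 1);
      return 0;
    }

This is the same shape as the from_bitset/is_bitset helpers being replaced earlier in the hunk; the new code simply hides the encoding behind the Config's inline functions.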
struct HeapTypeConfig { typedef TypeImpl<HeapTypeConfig> Type; typedef i::Object Base; - typedef i::FixedArray Unioned; + typedef i::FixedArray Struct; typedef i::Isolate Region; template<class T> struct Handle { typedef i::Handle<T> type; }; - static i::Handle<Type> handle(Type* type) { - return i::handle(type, i::HeapObject::cast(type)->GetIsolate()); - } - - static bool is_bitset(Type* type) { return type->IsSmi(); } - static bool is_class(Type* type) { return type->IsMap(); } - static bool is_constant(Type* type) { return type->IsBox(); } - static bool is_union(Type* type) { return type->IsFixedArray(); } - - static int as_bitset(Type* type) { - return Smi::cast(type)->value(); - } - static i::Handle<i::Map> as_class(Type* type) { - return i::handle(i::Map::cast(type)); - } - static i::Handle<i::Object> as_constant(Type* type) { - i::Box* box = i::Box::cast(type); - return i::handle(box->value(), box->GetIsolate()); - } - static i::Handle<Unioned> as_union(Type* type) { - return i::handle(i::FixedArray::cast(type)); - } - - static Type* from_bitset(int bitset) { - return Type::cast(i::Smi::FromInt(bitset)); - } - static i::Handle<Type> from_bitset(int bitset, Isolate* isolate) { - return i::handle(from_bitset(bitset), isolate); - } - static i::Handle<Type> from_class( - i::Handle<i::Map> map, int lub, Isolate* isolate) { - return i::Handle<Type>::cast(i::Handle<Object>::cast(map)); - } - static i::Handle<Type> from_constant( - i::Handle<i::Object> value, int lub, Isolate* isolate) { - i::Handle<Box> box = isolate->factory()->NewBox(value); - return i::Handle<Type>::cast(i::Handle<Object>::cast(box)); - } - static i::Handle<Type> from_union(i::Handle<Unioned> unioned) { - return i::Handle<Type>::cast(i::Handle<Object>::cast(unioned)); - } - - static i::Handle<Unioned> union_create(int size, Isolate* isolate) { - return isolate->factory()->NewFixedArray(size); - } - static void union_shrink(i::Handle<Unioned> unioned, int size) { - unioned->Shrink(size); - } - static i::Handle<Type> union_get(i::Handle<Unioned> unioned, int i) { - Type* type = static_cast<Type*>(unioned->get(i)); - ASSERT(!is_union(type)); - return i::handle(type, unioned->GetIsolate()); - } - static void union_set( - i::Handle<Unioned> unioned, int i, i::Handle<Type> type) { - ASSERT(!is_union(*type)); - unioned->set(i, *type); - } - static int union_length(i::Handle<Unioned> unioned) { - return unioned->length(); - } - static int lub_bitset(Type* type) { - return 0; // kNone, which causes recomputation. 
- } + template<class T> static inline i::Handle<T> handle(T* type); + template<class T> static inline i::Handle<T> cast(i::Handle<Type> type); + + static inline bool is_bitset(Type* type); + static inline bool is_class(Type* type); + static inline bool is_constant(Type* type); + static inline bool is_struct(Type* type, int tag); + + static inline int as_bitset(Type* type); + static inline i::Handle<i::Map> as_class(Type* type); + static inline i::Handle<i::Object> as_constant(Type* type); + static inline i::Handle<Struct> as_struct(Type* type); + + static inline Type* from_bitset(int bitset); + static inline i::Handle<Type> from_bitset(int bitset, Isolate* isolate); + static inline i::Handle<Type> from_class( + i::Handle<i::Map> map, int lub, Isolate* isolate); + static inline i::Handle<Type> from_constant( + i::Handle<i::Object> value, int lub, Isolate* isolate); + static inline i::Handle<Type> from_struct(i::Handle<Struct> structured); + + static inline i::Handle<Struct> struct_create( + int tag, int length, Isolate* isolate); + static inline void struct_shrink(i::Handle<Struct> structured, int length); + static inline int struct_tag(i::Handle<Struct> structured); + static inline int struct_length(i::Handle<Struct> structured); + static inline i::Handle<Type> struct_get(i::Handle<Struct> structured, int i); + static inline void struct_set( + i::Handle<Struct> structured, int i, i::Handle<Type> type); + + static inline int lub_bitset(Type* type); }; -typedef TypeImpl<ZoneTypeConfig> Type; typedef TypeImpl<HeapTypeConfig> HeapType; @@ -643,7 +727,6 @@ struct BoundsImpl { typedef BoundsImpl<ZoneTypeConfig> Bounds; - } } // namespace v8::internal #endif // V8_TYPES_H_ diff --git a/deps/v8/src/typing.cc b/deps/v8/src/typing.cc index 2a581e293..434aff349 100644 --- a/deps/v8/src/typing.cc +++ b/deps/v8/src/typing.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "typing.h" @@ -39,8 +16,9 @@ namespace internal { AstTyper::AstTyper(CompilationInfo* info) : info_(info), oracle_( - Handle<Code>(info->closure()->shared()->code()), - Handle<Context>(info->closure()->context()->native_context()), + handle(info->closure()->shared()->code()), + handle(info->closure()->shared()->feedback_vector()), + handle(info->closure()->context()->native_context()), info->zone()), store_(info->zone()) { InitializeAstVisitor(info->zone()); @@ -83,7 +61,7 @@ void AstTyper::Run(CompilationInfo* info) { Effect AstTyper::ObservedOnStack(Object* value) { - Type* lower = Type::OfCurrently(handle(value, isolate()), zone()); + Type* lower = Type::NowOf(value, zone()); return Effect(Bounds(lower, Type::Any(zone()))); } @@ -530,7 +508,7 @@ void AstTyper::VisitCall(Call* expr) { // Collect type feedback. RECURSE(Visit(expr->expression())); if (!expr->expression()->IsProperty() && - expr->HasCallFeedbackSlot() && + expr->IsUsingCallFeedbackSlot(isolate()) && oracle()->CallIsMonomorphic(expr->CallFeedbackSlot())) { expr->set_target(oracle()->GetCallTarget(expr->CallFeedbackSlot())); } diff --git a/deps/v8/src/typing.h b/deps/v8/src/typing.h index 0517812ec..71a63c309 100644 --- a/deps/v8/src/typing.h +++ b/deps/v8/src/typing.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_TYPING_H_ #define V8_TYPING_H_ diff --git a/deps/v8/src/unbound-queue-inl.h b/deps/v8/src/unbound-queue-inl.h index 796ba401d..7c2e8bc4f 100644 --- a/deps/v8/src/unbound-queue-inl.h +++ b/deps/v8/src/unbound-queue-inl.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_UNBOUND_QUEUE_INL_H_ #define V8_UNBOUND_QUEUE_INL_H_ diff --git a/deps/v8/src/unbound-queue.h b/deps/v8/src/unbound-queue.h index 429e3c673..35a3ef499 100644 --- a/deps/v8/src/unbound-queue.h +++ b/deps/v8/src/unbound-queue.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_UNBOUND_QUEUE_ #define V8_UNBOUND_QUEUE_ diff --git a/deps/v8/src/unicode-inl.h b/deps/v8/src/unicode-inl.h index 99eca644b..a0142d225 100644 --- a/deps/v8/src/unicode-inl.h +++ b/deps/v8/src/unicode-inl.h @@ -1,29 +1,6 @@ // Copyright 2007-2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_UNICODE_INL_H_ #define V8_UNICODE_INL_H_ diff --git a/deps/v8/src/unicode.cc b/deps/v8/src/unicode.cc index 2bef7ab20..3322110ab 100644 --- a/deps/v8/src/unicode.cc +++ b/deps/v8/src/unicode.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // // This file was generated at 2014-02-07 15:31:16.733174 diff --git a/deps/v8/src/unicode.h b/deps/v8/src/unicode.h index 65a9af58f..ecb5ab4d7 100644 --- a/deps/v8/src/unicode.h +++ b/deps/v8/src/unicode.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_UNICODE_H_ #define V8_UNICODE_H_ diff --git a/deps/v8/src/unique.h b/deps/v8/src/unique.h index 2f6008c5a..8ed268290 100644 --- a/deps/v8/src/unique.h +++ b/deps/v8/src/unique.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_HYDROGEN_UNIQUE_H_ #define V8_HYDROGEN_UNIQUE_H_ @@ -157,6 +134,19 @@ class UniqueSet V8_FINAL : public ZoneObject { // Constructor. A new set will be empty. UniqueSet() : size_(0), capacity_(0), array_(NULL) { } + // Capacity constructor. A new set will be empty. + UniqueSet(int capacity, Zone* zone) + : size_(0), capacity_(capacity), + array_(zone->NewArray<Unique<T> >(capacity)) { + ASSERT(capacity <= kMaxCapacity); + } + + // Singleton constructor. + UniqueSet(Unique<T> uniq, Zone* zone) + : size_(1), capacity_(1), array_(zone->NewArray<Unique<T> >(1)) { + array_[0] = uniq; + } + // Add a new element to this unique set. Mutates this set. O(|this|). void Add(Unique<T> uniq, Zone* zone) { ASSERT(uniq.IsInitialized()); @@ -189,7 +179,7 @@ class UniqueSet V8_FINAL : public ZoneObject { } // Compare this set against another set. O(|this|). - bool Equals(UniqueSet<T>* that) const { + bool Equals(const UniqueSet<T>* that) const { if (that->size_ != this->size_) return false; for (int i = 0; i < this->size_; i++) { if (this->array_[i] != that->array_[i]) return false; @@ -200,15 +190,18 @@ class UniqueSet V8_FINAL : public ZoneObject { // Check whether this set contains the given element. O(|this|) // TODO(titzer): use binary search for large sets to make this O(log|this|) template <typename U> - bool Contains(Unique<U> elem) const { - for (int i = 0; i < size_; i++) { - if (this->array_[i] == elem) return true; + bool Contains(const Unique<U> elem) const { + for (int i = 0; i < this->size_; ++i) { + Unique<T> cand = this->array_[i]; + if (cand.raw_address_ >= elem.raw_address_) { + return cand.raw_address_ == elem.raw_address_; + } } return false; } // Check if this set is a subset of the given set. O(|this| + |that|). - bool IsSubset(UniqueSet<T>* that) const { + bool IsSubset(const UniqueSet<T>* that) const { if (that->size_ < this->size_) return false; int j = 0; for (int i = 0; i < this->size_; i++) { @@ -224,11 +217,11 @@ class UniqueSet V8_FINAL : public ZoneObject { // Returns a new set representing the intersection of this set and the other. // O(|this| + |that|). 
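The new Contains above relies on the set's array being kept sorted by raw address, so the scan can stop as soon as it reaches an element at or past the probe. A standalone sketch of the same early-exit idea over a plain sorted array of addresses (illustrative only):

    #include <cassert>
    #include <cstdint>

    // Linear probe over a sorted array; stops early once the candidate address
    // is >= the probe, mirroring the diff's UniqueSet::Contains().
    inline bool SortedContains(const uintptr_t* array, int size, uintptr_t elem) {
      for (int i = 0; i < size; ++i) {
        if (array[i] >= elem) return array[i] == elem;
      }
      return false;
    }

    int main() {
      uintptr_t set[] = { 0x10, 0x20, 0x40 };
      assert(SortedContains(set, 3, 0x20));
      assert(!SortedContains(set, 3, 0x30));  // exits at 0x40 without scanning on
      return 0;
    }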
- UniqueSet<T>* Intersect(UniqueSet<T>* that, Zone* zone) const { + UniqueSet<T>* Intersect(const UniqueSet<T>* that, Zone* zone) const { if (that->size_ == 0 || this->size_ == 0) return new(zone) UniqueSet<T>(); - UniqueSet<T>* out = new(zone) UniqueSet<T>(); - out->Grow(Min(this->size_, that->size_), zone); + UniqueSet<T>* out = new(zone) UniqueSet<T>( + Min(this->size_, that->size_), zone); int i = 0, j = 0, k = 0; while (i < this->size_ && j < that->size_) { @@ -251,12 +244,12 @@ class UniqueSet V8_FINAL : public ZoneObject { // Returns a new set representing the union of this set and the other. // O(|this| + |that|). - UniqueSet<T>* Union(UniqueSet<T>* that, Zone* zone) const { + UniqueSet<T>* Union(const UniqueSet<T>* that, Zone* zone) const { if (that->size_ == 0) return this->Copy(zone); if (this->size_ == 0) return that->Copy(zone); - UniqueSet<T>* out = new(zone) UniqueSet<T>(); - out->Grow(this->size_ + that->size_, zone); + UniqueSet<T>* out = new(zone) UniqueSet<T>( + this->size_ + that->size_, zone); int i = 0, j = 0, k = 0; while (i < this->size_ && j < that->size_) { @@ -284,10 +277,8 @@ class UniqueSet V8_FINAL : public ZoneObject { // Makes an exact copy of this set. O(|this|). UniqueSet<T>* Copy(Zone* zone) const { - UniqueSet<T>* copy = new(zone) UniqueSet<T>(); + UniqueSet<T>* copy = new(zone) UniqueSet<T>(this->size_, zone); copy->size_ = this->size_; - copy->capacity_ = this->size_; - copy->array_ = zone->NewArray<Unique<T> >(this->size_); memcpy(copy->array_, this->array_, this->size_ * sizeof(Unique<T>)); return copy; } diff --git a/deps/v8/src/uri.h b/deps/v8/src/uri.h index 1e73ddd3d..6b76525fe 100644 --- a/deps/v8/src/uri.h +++ b/deps/v8/src/uri.h @@ -1,38 +1,15 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
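Returning to the UniqueSet changes above: because both inputs stay sorted, Intersect and Union are linear two-pointer merges, and the new capacity constructor lets the result be sized up front instead of growing. A self-contained sketch of that merge pattern over std::vector (not the V8 class itself):

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Two-pointer merge over sorted inputs, mirroring UniqueSet::Intersect/Union.
    std::vector<int> Intersect(const std::vector<int>& a, const std::vector<int>& b) {
      std::vector<int> out;
      out.reserve(a.size() < b.size() ? a.size() : b.size());  // sized up front
      size_t i = 0, j = 0;
      while (i < a.size() && j < b.size()) {
        if (a[i] == b[j]) { out.push_back(a[i]); ++i; ++j; }
        else if (a[i] < b[j]) { ++i; }
        else { ++j; }
      }
      return out;
    }

    std::vector<int> Union(const std::vector<int>& a, const std::vector<int>& b) {
      std::vector<int> out;
      out.reserve(a.size() + b.size());
      size_t i = 0, j = 0;
      while (i < a.size() && j < b.size()) {
        if (a[i] == b[j]) { out.push_back(a[i]); ++i; ++j; }
        else if (a[i] < b[j]) { out.push_back(a[i++]); }
        else { out.push_back(b[j++]); }
      }
      while (i < a.size()) out.push_back(a[i++]);
      while (j < b.size()) out.push_back(b[j++]);
      return out;
    }

    int main() {
      std::vector<int> a = {1, 3, 5}, b = {3, 4, 5};
      assert(Intersect(a, b) == (std::vector<int>{3, 5}));
      assert(Union(a, b) == (std::vector<int>{1, 3, 4, 5}));
      return 0;
    }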
#ifndef V8_URI_H_ #define V8_URI_H_ #include "v8.h" +#include "conversions.h" #include "string-search.h" -#include "v8utils.h" -#include "v8conversions.h" +#include "utils.h" namespace v8 { namespace internal { @@ -61,13 +38,14 @@ Vector<const uc16> GetCharVector(Handle<String> string) { class URIUnescape : public AllStatic { public: template<typename Char> - static Handle<String> Unescape(Isolate* isolate, Handle<String> source); + MUST_USE_RESULT static MaybeHandle<String> Unescape(Isolate* isolate, + Handle<String> source); private: static const signed char kHexValue['g']; template<typename Char> - static Handle<String> UnescapeSlow( + MUST_USE_RESULT static MaybeHandle<String> UnescapeSlow( Isolate* isolate, Handle<String> string, int start_index); static INLINE(int TwoDigitHex(uint16_t character1, uint16_t character2)); @@ -91,7 +69,8 @@ const signed char URIUnescape::kHexValue[] = { template<typename Char> -Handle<String> URIUnescape::Unescape(Isolate* isolate, Handle<String> source) { +MaybeHandle<String> URIUnescape::Unescape(Isolate* isolate, + Handle<String> source) { int index; { DisallowHeapAllocation no_allocation; StringSearch<uint8_t, Char> search(isolate, STATIC_ASCII_VECTOR("%")); @@ -103,7 +82,7 @@ Handle<String> URIUnescape::Unescape(Isolate* isolate, Handle<String> source) { template <typename Char> -Handle<String> URIUnescape::UnescapeSlow( +MaybeHandle<String> URIUnescape::UnescapeSlow( Isolate* isolate, Handle<String> string, int start_index) { bool one_byte = true; int length = string->length(); @@ -129,9 +108,8 @@ Handle<String> URIUnescape::UnescapeSlow( Handle<String> second_part; ASSERT(unescaped_length <= String::kMaxLength); if (one_byte) { - Handle<SeqOneByteString> dest = - isolate->factory()->NewRawOneByteString(unescaped_length); - ASSERT(!dest.is_null()); + Handle<SeqOneByteString> dest = isolate->factory()->NewRawOneByteString( + unescaped_length).ToHandleChecked(); DisallowHeapAllocation no_allocation; Vector<const Char> vector = GetCharVector<Char>(string); for (int i = start_index; i < length; dest_position++) { @@ -142,9 +120,8 @@ Handle<String> URIUnescape::UnescapeSlow( } second_part = dest; } else { - Handle<SeqTwoByteString> dest = - isolate->factory()->NewRawTwoByteString(unescaped_length); - ASSERT(!dest.is_null()); + Handle<SeqTwoByteString> dest = isolate->factory()->NewRawTwoByteString( + unescaped_length).ToHandleChecked(); DisallowHeapAllocation no_allocation; Vector<const Char> vector = GetCharVector<Char>(string); for (int i = start_index; i < length; dest_position++) { @@ -203,7 +180,8 @@ int URIUnescape::UnescapeChar(Vector<const Char> vector, class URIEscape : public AllStatic { public: template<typename Char> - static Handle<String> Escape(Isolate* isolate, Handle<String> string); + MUST_USE_RESULT static MaybeHandle<String> Escape(Isolate* isolate, + Handle<String> string); private: static const char kHexChars[17]; @@ -247,7 +225,7 @@ const char URIEscape::kNotEscaped[] = { template<typename Char> -Handle<String> URIEscape::Escape(Isolate* isolate, Handle<String> string) { +MaybeHandle<String> URIEscape::Escape(Isolate* isolate, Handle<String> string) { ASSERT(string->IsFlat()); int escaped_length = 0; int length = string->length(); @@ -273,9 +251,11 @@ Handle<String> URIEscape::Escape(Isolate* isolate, Handle<String> string) { // No length change implies no change. Return original string if no change. 
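The unescape path above decodes %XX sequences in two passes: first measure the decoded length (and whether two-byte output is needed), then fill a raw string of exactly that size, now obtained through ToHandleChecked. A small standalone sketch of the %XX decoding itself over std::string, leaving out the %uXXXX case and the handle/factory machinery:

    #include <cassert>
    #include <string>

    // Decode one hex digit, or -1 if the character is not a hex digit.
    inline int HexValue(char c) {
      if (c >= '0' && c <= '9') return c - '0';
      if (c >= 'a' && c <= 'f') return c - 'a' + 10;
      if (c >= 'A' && c <= 'F') return c - 'A' + 10;
      return -1;
    }

    // Replace "%XX" with the byte it encodes; malformed escapes are copied
    // through unchanged, matching unescape()'s permissive behavior.
    std::string Unescape(const std::string& in) {
      std::string out;
      out.reserve(in.size());
      for (size_t i = 0; i < in.size(); ++i) {
        if (in[i] == '%' && i + 2 < in.size()) {
          int hi = HexValue(in[i + 1]), lo = HexValue(in[i + 2]);
          if (hi >= 0 && lo >= 0) {
            out.push_back(static_cast<char>(hi * 16 + lo));
            i += 2;
            continue;
          }
        }
        out.push_back(in[i]);
      }
      return out;
    }

    int main() {
      assert(Unescape("a%41%zz") == "aA%zz");
      return 0;
    }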
if (escaped_length == length) return string; - Handle<SeqOneByteString> dest = - isolate->factory()->NewRawOneByteString(escaped_length); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, dest, Handle<String>()); + Handle<SeqOneByteString> dest; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, dest, + isolate->factory()->NewRawOneByteString(escaped_length), + String); int dest_position = 0; { DisallowHeapAllocation no_allocation; diff --git a/deps/v8/src/uri.js b/deps/v8/src/uri.js index 4e3f084af..0e50f0b70 100644 --- a/deps/v8/src/uri.js +++ b/deps/v8/src/uri.js @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file relies on the fact that the following declaration has been made // in runtime.js: diff --git a/deps/v8/src/utils-inl.h b/deps/v8/src/utils-inl.h index 76a3c104e..8c89f65f7 100644 --- a/deps/v8/src/utils-inl.h +++ b/deps/v8/src/utils-inl.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
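The Escape path just above is also two-pass: it first counts how many characters need expansion so the destination can be allocated at its exact final size (now via ASSIGN_RETURN_ON_EXCEPTION), then fills it. A rough standalone sketch of that count-then-fill shape for the single-byte %XX case; the set of unescaped characters here is a simplifying assumption, not the real kNotEscaped table:

    #include <cassert>
    #include <string>

    inline bool NotEscaped(unsigned char c) {
      return (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') ||
             (c >= '0' && c <= '9') || c == '.' || c == '-' || c == '_';
    }

    std::string Escape(const std::string& in) {
      // Pass 1: compute the escaped length up front, as the diff does before
      // allocating the raw one-byte string.
      size_t escaped_length = 0;
      for (unsigned char c : in) escaped_length += NotEscaped(c) ? 1 : 3;
      if (escaped_length == in.size()) return in;  // no change, reuse the input

      // Pass 2: fill the pre-sized buffer.
      static const char kHex[] = "0123456789ABCDEF";
      std::string out(escaped_length, '\0');
      size_t pos = 0;
      for (unsigned char c : in) {
        if (NotEscaped(c)) {
          out[pos++] = static_cast<char>(c);
        } else {
          out[pos++] = '%';
          out[pos++] = kHex[c >> 4];
          out[pos++] = kHex[c & 0xF];
        }
      }
      return out;
    }

    int main() {
      assert(Escape("a b") == "a%20b");
      assert(Escape("abc") == "abc");
      return 0;
    }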
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_UTILS_INL_H_ #define V8_UTILS_INL_H_ diff --git a/deps/v8/src/utils.cc b/deps/v8/src/utils.cc index 6838cb069..7af30f27b 100644 --- a/deps/v8/src/utils.cc +++ b/deps/v8/src/utils.cc @@ -1,32 +1,12 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <stdarg.h> -#include "../include/v8stdint.h" +#include <sys/stat.h> + +#include "v8.h" + #include "checks.h" #include "platform.h" #include "utils.h" @@ -97,4 +77,241 @@ char* SimpleStringBuilder::Finalize() { } +void PrintF(const char* format, ...) { + va_list arguments; + va_start(arguments, format); + OS::VPrint(format, arguments); + va_end(arguments); +} + + +void PrintF(FILE* out, const char* format, ...) { + va_list arguments; + va_start(arguments, format); + OS::VFPrint(out, format, arguments); + va_end(arguments); +} + + +void PrintPID(const char* format, ...) 
{ + OS::Print("[%d] ", OS::GetCurrentProcessId()); + va_list arguments; + va_start(arguments, format); + OS::VPrint(format, arguments); + va_end(arguments); +} + + +void Flush(FILE* out) { + fflush(out); +} + + +char* ReadLine(const char* prompt) { + char* result = NULL; + char line_buf[256]; + int offset = 0; + bool keep_going = true; + fprintf(stdout, "%s", prompt); + fflush(stdout); + while (keep_going) { + if (fgets(line_buf, sizeof(line_buf), stdin) == NULL) { + // fgets got an error. Just give up. + if (result != NULL) { + DeleteArray(result); + } + return NULL; + } + int len = StrLength(line_buf); + if (len > 1 && + line_buf[len - 2] == '\\' && + line_buf[len - 1] == '\n') { + // When we read a line that ends with a "\" we remove the escape and + // append the remainder. + line_buf[len - 2] = '\n'; + line_buf[len - 1] = 0; + len -= 1; + } else if ((len > 0) && (line_buf[len - 1] == '\n')) { + // Since we read a new line we are done reading the line. This + // will exit the loop after copying this buffer into the result. + keep_going = false; + } + if (result == NULL) { + // Allocate the initial result and make room for the terminating '\0' + result = NewArray<char>(len + 1); + } else { + // Allocate a new result with enough room for the new addition. + int new_len = offset + len + 1; + char* new_result = NewArray<char>(new_len); + // Copy the existing input into the new array and set the new + // array as the result. + OS::MemCopy(new_result, result, offset * kCharSize); + DeleteArray(result); + result = new_result; + } + // Copy the newly read line into the result. + OS::MemCopy(result + offset, line_buf, len * kCharSize); + offset += len; + } + ASSERT(result != NULL); + result[offset] = '\0'; + return result; +} + + +char* ReadCharsFromFile(FILE* file, + int* size, + int extra_space, + bool verbose, + const char* filename) { + if (file == NULL || fseek(file, 0, SEEK_END) != 0) { + if (verbose) { + OS::PrintError("Cannot read from file %s.\n", filename); + } + return NULL; + } + + // Get the size of the file and rewind it. 
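ReadLine above does three things worth noting: it treats a trailing backslash as a line continuation, it grows the result buffer by copy-and-replace, and it always returns a heap string the caller must release with DeleteArray. A compact standalone sketch of the same contract, using std::string for the buffer management instead of the NewArray/DeleteArray dance:

    #include <cstdio>
    #include <string>

    // Read one logical line from stdin, treating a trailing "\" as a line
    // continuation, as in the diff's ReadLine(). Returns false on EOF/error.
    bool ReadLogicalLine(const char* prompt, std::string* out) {
      std::fputs(prompt, stdout);
      std::fflush(stdout);
      out->clear();
      char buf[256];
      while (std::fgets(buf, sizeof(buf), stdin) != NULL) {
        std::string chunk(buf);
        size_t len = chunk.size();
        if (len > 1 && chunk[len - 2] == '\\' && chunk[len - 1] == '\n') {
          chunk.erase(len - 2);    // drop the escape, keep the newline, keep reading
          chunk.push_back('\n');
          *out += chunk;
          continue;
        }
        *out += chunk;
        if (!chunk.empty() && chunk.back() == '\n') return true;  // full line read
      }
      return !out->empty();        // EOF: return whatever was accumulated
    }

    int main() {
      std::string line;
      while (ReadLogicalLine("dbg> ", &line)) std::printf("got: %s", line.c_str());
      return 0;
    }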
+ *size = ftell(file); + rewind(file); + + char* result = NewArray<char>(*size + extra_space); + for (int i = 0; i < *size && feof(file) == 0;) { + int read = static_cast<int>(fread(&result[i], 1, *size - i, file)); + if (read != (*size - i) && ferror(file) != 0) { + fclose(file); + DeleteArray(result); + return NULL; + } + i += read; + } + return result; +} + + +char* ReadCharsFromFile(const char* filename, + int* size, + int extra_space, + bool verbose) { + FILE* file = OS::FOpen(filename, "rb"); + char* result = ReadCharsFromFile(file, size, extra_space, verbose, filename); + if (file != NULL) fclose(file); + return result; +} + + +byte* ReadBytes(const char* filename, int* size, bool verbose) { + char* chars = ReadCharsFromFile(filename, size, 0, verbose); + return reinterpret_cast<byte*>(chars); +} + + +static Vector<const char> SetVectorContents(char* chars, + int size, + bool* exists) { + if (!chars) { + *exists = false; + return Vector<const char>::empty(); + } + chars[size] = '\0'; + *exists = true; + return Vector<const char>(chars, size); +} + + +Vector<const char> ReadFile(const char* filename, + bool* exists, + bool verbose) { + int size; + char* result = ReadCharsFromFile(filename, &size, 1, verbose); + return SetVectorContents(result, size, exists); +} + + +Vector<const char> ReadFile(FILE* file, + bool* exists, + bool verbose) { + int size; + char* result = ReadCharsFromFile(file, &size, 1, verbose, ""); + return SetVectorContents(result, size, exists); +} + + +int WriteCharsToFile(const char* str, int size, FILE* f) { + int total = 0; + while (total < size) { + int write = static_cast<int>(fwrite(str, 1, size - total, f)); + if (write == 0) { + return total; + } + total += write; + str += write; + } + return total; +} + + +int AppendChars(const char* filename, + const char* str, + int size, + bool verbose) { + FILE* f = OS::FOpen(filename, "ab"); + if (f == NULL) { + if (verbose) { + OS::PrintError("Cannot open file %s for writing.\n", filename); + } + return 0; + } + int written = WriteCharsToFile(str, size, f); + fclose(f); + return written; +} + + +int WriteChars(const char* filename, + const char* str, + int size, + bool verbose) { + FILE* f = OS::FOpen(filename, "wb"); + if (f == NULL) { + if (verbose) { + OS::PrintError("Cannot open file %s for writing.\n", filename); + } + return 0; + } + int written = WriteCharsToFile(str, size, f); + fclose(f); + return written; +} + + +int WriteBytes(const char* filename, + const byte* bytes, + int size, + bool verbose) { + const char* str = reinterpret_cast<const char*>(bytes); + return WriteChars(filename, str, size, verbose); +} + + + +void StringBuilder::AddFormatted(const char* format, ...) { + va_list arguments; + va_start(arguments, format); + AddFormattedList(format, arguments); + va_end(arguments); +} + + +void StringBuilder::AddFormattedList(const char* format, va_list list) { + ASSERT(!is_finalized() && position_ <= buffer_.length()); + int n = OS::VSNPrintF(buffer_ + position_, format, list); + if (n < 0 || n >= (buffer_.length() - position_)) { + position_ = buffer_.length(); + } else { + position_ += n; + } +} + + } } // namespace v8::internal diff --git a/deps/v8/src/utils.h b/deps/v8/src/utils.h index 753822614..115f78475 100644 --- a/deps/v8/src/utils.h +++ b/deps/v8/src/utils.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
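The file helpers moved into utils.cc above share two patterns: size the buffer with ftell/rewind before reading, and loop on fread/fwrite because a single call may transfer fewer bytes than requested. A standalone sketch of both loops in portable C++, without the OS:: wrappers (the input filename in main is only an example):

    #include <cstdio>
    #include <vector>

    // Read an entire file into memory, retrying short reads as in ReadCharsFromFile.
    bool ReadWholeFile(const char* filename, std::vector<char>* out) {
      std::FILE* f = std::fopen(filename, "rb");
      if (f == NULL || std::fseek(f, 0, SEEK_END) != 0) {
        if (f != NULL) std::fclose(f);
        return false;
      }
      long size = std::ftell(f);
      std::rewind(f);
      out->resize(size > 0 ? static_cast<size_t>(size) : 0);
      size_t done = 0;
      while (done < out->size() && !std::feof(f)) {
        size_t n = std::fread(out->data() + done, 1, out->size() - done, f);
        if (n == 0 && std::ferror(f)) { std::fclose(f); return false; }
        done += n;
      }
      std::fclose(f);
      return done == out->size();
    }

    // Write a buffer, retrying short writes as in WriteCharsToFile.
    size_t WriteAll(const char* str, size_t size, std::FILE* f) {
      size_t total = 0;
      while (total < size) {
        size_t n = std::fwrite(str + total, 1, size - total, f);
        if (n == 0) break;
        total += n;
      }
      return total;
    }

    int main() {
      std::vector<char> data;
      if (ReadWholeFile("example.txt", &data)) {   // hypothetical input file
        WriteAll(data.data(), data.size(), stdout);
      }
      return 0;
    }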
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_UTILS_H_ #define V8_UTILS_H_ @@ -31,11 +8,12 @@ #include <limits.h> #include <stdlib.h> #include <string.h> -#include <algorithm> #include "allocation.h" #include "checks.h" #include "globals.h" +#include "platform.h" +#include "vector.h" namespace v8 { namespace internal { @@ -43,10 +21,10 @@ namespace internal { // ---------------------------------------------------------------------------- // General helper functions -#define IS_POWER_OF_TWO(x) (((x) & ((x) - 1)) == 0) +#define IS_POWER_OF_TWO(x) ((x) != 0 && (((x) & ((x) - 1)) == 0)) -// Returns true iff x is a power of 2 (or zero). Cannot be used with the -// maximally negative value of the type T (the -1 overflows). +// Returns true iff x is a power of 2. Cannot be used with the maximally +// negative value of the type T (the -1 overflows). template <typename T> inline bool IsPowerOf2(T x) { return IS_POWER_OF_TWO(x); @@ -56,7 +34,6 @@ inline bool IsPowerOf2(T x) { // X must be a power of 2. Returns the number of trailing zeros. inline int WhichPowerOf2(uint32_t x) { ASSERT(IsPowerOf2(x)); - ASSERT(x != 0); int bits = 0; #ifdef DEBUG int original_x = x; @@ -250,13 +227,6 @@ T NegAbs(T a) { } -inline int StrLength(const char* string) { - size_t length = strlen(string); - ASSERT(length == static_cast<size_t>(static_cast<int>(length))); - return static_cast<int>(length); -} - - // TODO(svenpanne) Clean up the whole power-of-2 mess. inline int32_t WhichPowerOf2Abs(int32_t x) { return (x == kMinInt) ? 
31 : WhichPowerOf2(Abs(x)); @@ -395,110 +365,6 @@ class Access { }; -template <typename T> -class Vector { - public: - Vector() : start_(NULL), length_(0) {} - Vector(T* data, int length) : start_(data), length_(length) { - ASSERT(length == 0 || (length > 0 && data != NULL)); - } - - static Vector<T> New(int length) { - return Vector<T>(NewArray<T>(length), length); - } - - // Returns a vector using the same backing storage as this one, - // spanning from and including 'from', to but not including 'to'. - Vector<T> SubVector(int from, int to) { - SLOW_ASSERT(to <= length_); - SLOW_ASSERT(from < to); - ASSERT(0 <= from); - return Vector<T>(start() + from, to - from); - } - - // Returns the length of the vector. - int length() const { return length_; } - - // Returns whether or not the vector is empty. - bool is_empty() const { return length_ == 0; } - - // Returns the pointer to the start of the data in the vector. - T* start() const { return start_; } - - // Access individual vector elements - checks bounds in debug mode. - T& operator[](int index) const { - ASSERT(0 <= index && index < length_); - return start_[index]; - } - - const T& at(int index) const { return operator[](index); } - - T& first() { return start_[0]; } - - T& last() { return start_[length_ - 1]; } - - // Returns a clone of this vector with a new backing store. - Vector<T> Clone() const { - T* result = NewArray<T>(length_); - for (int i = 0; i < length_; i++) result[i] = start_[i]; - return Vector<T>(result, length_); - } - - void Sort(int (*cmp)(const T*, const T*)) { - std::sort(start(), start() + length(), RawComparer(cmp)); - } - - void Sort() { - std::sort(start(), start() + length()); - } - - void Truncate(int length) { - ASSERT(length <= length_); - length_ = length; - } - - // Releases the array underlying this vector. Once disposed the - // vector is empty. - void Dispose() { - DeleteArray(start_); - start_ = NULL; - length_ = 0; - } - - inline Vector<T> operator+(int offset) { - ASSERT(offset < length_); - return Vector<T>(start_ + offset, length_ - offset); - } - - // Factory method for creating empty vectors. - static Vector<T> empty() { return Vector<T>(NULL, 0); } - - template<typename S> - static Vector<T> cast(Vector<S> input) { - return Vector<T>(reinterpret_cast<T*>(input.start()), - input.length() * sizeof(S) / sizeof(T)); - } - - protected: - void set_start(T* start) { start_ = start; } - - private: - T* start_; - int length_; - - class RawComparer { - public: - explicit RawComparer(int (*cmp)(const T*, const T*)) : cmp_(cmp) {} - bool operator()(const T& a, const T& b) { - return cmp_(&a, &b) < 0; - } - - private: - int (*cmp_)(const T*, const T*); - }; -}; - - // A pointer that can only be set once and doesn't allow NULL values. template<typename T> class SetOncePointer { @@ -536,16 +402,14 @@ class EmbeddedVector : public Vector<T> { // When copying, make underlying Vector to reference our buffer. EmbeddedVector(const EmbeddedVector& rhs) : Vector<T>(rhs) { - // TODO(jkummerow): Refactor #includes and use OS::MemCopy() instead. - memcpy(buffer_, rhs.buffer_, sizeof(T) * kSize); + OS::MemCopy(buffer_, rhs.buffer_, sizeof(T) * kSize); set_start(buffer_); } EmbeddedVector& operator=(const EmbeddedVector& rhs) { if (this == &rhs) return *this; Vector<T>::operator=(rhs); - // TODO(jkummerow): Refactor #includes and use OS::MemCopy() instead. 
- memcpy(buffer_, rhs.buffer_, sizeof(T) * kSize); + OS::MemCopy(buffer_, rhs.buffer_, sizeof(T) * kSize); this->set_start(buffer_); return *this; } @@ -555,44 +419,6 @@ class EmbeddedVector : public Vector<T> { }; -template <typename T> -class ScopedVector : public Vector<T> { - public: - explicit ScopedVector(int length) : Vector<T>(NewArray<T>(length), length) { } - ~ScopedVector() { - DeleteArray(this->start()); - } - - private: - DISALLOW_IMPLICIT_CONSTRUCTORS(ScopedVector); -}; - -#define STATIC_ASCII_VECTOR(x) \ - v8::internal::Vector<const uint8_t>(reinterpret_cast<const uint8_t*>(x), \ - ARRAY_SIZE(x)-1) - -inline Vector<const char> CStrVector(const char* data) { - return Vector<const char>(data, StrLength(data)); -} - -inline Vector<const uint8_t> OneByteVector(const char* data, int length) { - return Vector<const uint8_t>(reinterpret_cast<const uint8_t*>(data), length); -} - -inline Vector<const uint8_t> OneByteVector(const char* data) { - return OneByteVector(data, StrLength(data)); -} - -inline Vector<char> MutableCStrVector(char* data) { - return Vector<char>(data, StrLength(data)); -} - -inline Vector<char> MutableCStrVector(char* data, int max) { - int length = StrLength(data); - return Vector<char>(data, (length < max) ? length : max); -} - - /* * A class that collects values into a backing store. * Specialized versions of the class can allow access to the backing store @@ -920,7 +746,6 @@ struct BitCastHelper { INLINE(static Dest cast(const Source& source)) { Dest dest; - // TODO(jkummerow): Refactor #includes and use OS::MemCopy() instead. memcpy(&dest, &source, sizeof(dest)); return dest; } @@ -1206,6 +1031,447 @@ class ContainerPointerWrapper { C* container_; }; + +// ---------------------------------------------------------------------------- +// I/O support. + +#if __GNUC__ >= 4 +// On gcc we can ask the compiler to check the types of %d-style format +// specifiers and their associated arguments. TODO(erikcorry) fix this +// so it works on MacOSX. +#if defined(__MACH__) && defined(__APPLE__) +#define PRINTF_CHECKING +#define FPRINTF_CHECKING +#define PRINTF_METHOD_CHECKING +#define FPRINTF_METHOD_CHECKING +#else // MacOsX. +#define PRINTF_CHECKING __attribute__ ((format (printf, 1, 2))) +#define FPRINTF_CHECKING __attribute__ ((format (printf, 2, 3))) +#define PRINTF_METHOD_CHECKING __attribute__ ((format (printf, 2, 3))) +#define FPRINTF_METHOD_CHECKING __attribute__ ((format (printf, 3, 4))) +#endif +#else +#define PRINTF_CHECKING +#define FPRINTF_CHECKING +#define PRINTF_METHOD_CHECKING +#define FPRINTF_METHOD_CHECKING +#endif + +// Our version of printf(). +void PRINTF_CHECKING PrintF(const char* format, ...); +void FPRINTF_CHECKING PrintF(FILE* out, const char* format, ...); + +// Prepends the current process ID to the output. +void PRINTF_CHECKING PrintPID(const char* format, ...); + +// Our version of fflush. +void Flush(FILE* out); + +inline void Flush() { + Flush(stdout); +} + + +// Read a line of characters after printing the prompt to stdout. The resulting +// char* needs to be disposed off with DeleteArray by the caller. +char* ReadLine(const char* prompt); + + +// Read and return the raw bytes in a file. the size of the buffer is returned +// in size. +// The returned buffer must be freed by the caller. +byte* ReadBytes(const char* filename, int* size, bool verbose = true); + + +// Append size chars from str to the file given by filename. +// The file is overwritten. Returns the number of chars written. 
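BitCastHelper above (the memcpy whose TODO comment is removed) is the standard type-punning idiom: copy the object representation instead of dereferencing through an incompatible pointer, which would violate strict aliasing. A minimal standalone version of the same shape, not the V8 template itself:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Reinterpret the bits of one type as another without breaking strict
    // aliasing; the same memcpy trick BitCastHelper::cast uses.
    template <class Dest, class Source>
    inline Dest bit_cast(const Source& source) {
      static_assert(sizeof(Dest) == sizeof(Source), "size mismatch");
      Dest dest;
      std::memcpy(&dest, &source, sizeof(dest));
      return dest;
    }

    int main() {
      float f = 1.0f;
      uint32_t bits = bit_cast<uint32_t>(f);
      assert(bits == 0x3F800000u);  // IEEE-754 single-precision encoding of 1.0
      return 0;
    }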
+int AppendChars(const char* filename, + const char* str, + int size, + bool verbose = true); + + +// Write size chars from str to the file given by filename. +// The file is overwritten. Returns the number of chars written. +int WriteChars(const char* filename, + const char* str, + int size, + bool verbose = true); + + +// Write size bytes to the file given by filename. +// The file is overwritten. Returns the number of bytes written. +int WriteBytes(const char* filename, + const byte* bytes, + int size, + bool verbose = true); + + +// Write the C code +// const char* <varname> = "<str>"; +// const int <varname>_len = <len>; +// to the file given by filename. Only the first len chars are written. +int WriteAsCFile(const char* filename, const char* varname, + const char* str, int size, bool verbose = true); + + +// ---------------------------------------------------------------------------- +// Data structures + +template <typename T> +inline Vector< Handle<Object> > HandleVector(v8::internal::Handle<T>* elms, + int length) { + return Vector< Handle<Object> >( + reinterpret_cast<v8::internal::Handle<Object>*>(elms), length); +} + + +// ---------------------------------------------------------------------------- +// Memory + +// Copies words from |src| to |dst|. The data spans must not overlap. +template <typename T> +inline void CopyWords(T* dst, const T* src, size_t num_words) { + STATIC_ASSERT(sizeof(T) == kPointerSize); + // TODO(mvstanton): disabled because mac builds are bogus failing on this + // assert. They are doing a signed comparison. Investigate in + // the morning. + // ASSERT(Min(dst, const_cast<T*>(src)) + num_words <= + // Max(dst, const_cast<T*>(src))); + ASSERT(num_words > 0); + + // Use block copying OS::MemCopy if the segment we're copying is + // enough to justify the extra call/setup overhead. + static const size_t kBlockCopyLimit = 16; + + if (num_words < kBlockCopyLimit) { + do { + num_words--; + *dst++ = *src++; + } while (num_words > 0); + } else { + OS::MemCopy(dst, src, num_words * kPointerSize); + } +} + + +// Copies words from |src| to |dst|. No restrictions. +template <typename T> +inline void MoveWords(T* dst, const T* src, size_t num_words) { + STATIC_ASSERT(sizeof(T) == kPointerSize); + ASSERT(num_words > 0); + + // Use block copying OS::MemCopy if the segment we're copying is + // enough to justify the extra call/setup overhead. + static const size_t kBlockCopyLimit = 16; + + if (num_words < kBlockCopyLimit && + ((dst < src) || (dst >= (src + num_words * kPointerSize)))) { + T* end = dst + num_words; + do { + num_words--; + *dst++ = *src++; + } while (num_words > 0); + } else { + OS::MemMove(dst, src, num_words * kPointerSize); + } +} + + +// Copies data from |src| to |dst|. The data spans must not overlap. +template <typename T> +inline void CopyBytes(T* dst, const T* src, size_t num_bytes) { + STATIC_ASSERT(sizeof(T) == 1); + ASSERT(Min(dst, const_cast<T*>(src)) + num_bytes <= + Max(dst, const_cast<T*>(src))); + if (num_bytes == 0) return; + + // Use block copying OS::MemCopy if the segment we're copying is + // enough to justify the extra call/setup overhead. 
+ static const int kBlockCopyLimit = OS::kMinComplexMemCopy; + + if (num_bytes < static_cast<size_t>(kBlockCopyLimit)) { + do { + num_bytes--; + *dst++ = *src++; + } while (num_bytes > 0); + } else { + OS::MemCopy(dst, src, num_bytes); + } +} + + +template <typename T, typename U> +inline void MemsetPointer(T** dest, U* value, int counter) { +#ifdef DEBUG + T* a = NULL; + U* b = NULL; + a = b; // Fake assignment to check assignability. + USE(a); +#endif // DEBUG +#if V8_HOST_ARCH_IA32 +#define STOS "stosl" +#elif V8_HOST_ARCH_X64 +#define STOS "stosq" +#endif +#if defined(__native_client__) + // This STOS sequence does not validate for x86_64 Native Client. + // Here we #undef STOS to force use of the slower C version. + // TODO(bradchen): Profile V8 and implement a faster REP STOS + // here if the profile indicates it matters. +#undef STOS +#endif + +#if defined(MEMORY_SANITIZER) + // MemorySanitizer does not understand inline assembly. +#undef STOS +#endif + +#if defined(__GNUC__) && defined(STOS) + asm volatile( + "cld;" + "rep ; " STOS + : "+&c" (counter), "+&D" (dest) + : "a" (value) + : "memory", "cc"); +#else + for (int i = 0; i < counter; i++) { + dest[i] = value; + } +#endif + +#undef STOS +} + + +// Simple wrapper that allows an ExternalString to refer to a +// Vector<const char>. Doesn't assume ownership of the data. +class AsciiStringAdapter: public v8::String::ExternalAsciiStringResource { + public: + explicit AsciiStringAdapter(Vector<const char> data) : data_(data) {} + + virtual const char* data() const { return data_.start(); } + + virtual size_t length() const { return data_.length(); } + + private: + Vector<const char> data_; +}; + + +// Simple support to read a file into a 0-terminated C-string. +// The returned buffer must be freed by the caller. +// On return, *exits tells whether the file existed. +Vector<const char> ReadFile(const char* filename, + bool* exists, + bool verbose = true); +Vector<const char> ReadFile(FILE* file, + bool* exists, + bool verbose = true); + + +template <typename sourcechar, typename sinkchar> +INLINE(static void CopyCharsUnsigned(sinkchar* dest, + const sourcechar* src, + int chars)); +#if defined(V8_HOST_ARCH_ARM) +INLINE(void CopyCharsUnsigned(uint8_t* dest, const uint8_t* src, int chars)); +INLINE(void CopyCharsUnsigned(uint16_t* dest, const uint8_t* src, int chars)); +INLINE(void CopyCharsUnsigned(uint16_t* dest, const uint16_t* src, int chars)); +#elif defined(V8_HOST_ARCH_MIPS) +INLINE(void CopyCharsUnsigned(uint8_t* dest, const uint8_t* src, int chars)); +INLINE(void CopyCharsUnsigned(uint16_t* dest, const uint16_t* src, int chars)); +#endif + +// Copy from ASCII/16bit chars to ASCII/16bit chars. 
+template <typename sourcechar, typename sinkchar> +INLINE(void CopyChars(sinkchar* dest, const sourcechar* src, int chars)); + +template<typename sourcechar, typename sinkchar> +void CopyChars(sinkchar* dest, const sourcechar* src, int chars) { + ASSERT(sizeof(sourcechar) <= 2); + ASSERT(sizeof(sinkchar) <= 2); + if (sizeof(sinkchar) == 1) { + if (sizeof(sourcechar) == 1) { + CopyCharsUnsigned(reinterpret_cast<uint8_t*>(dest), + reinterpret_cast<const uint8_t*>(src), + chars); + } else { + CopyCharsUnsigned(reinterpret_cast<uint8_t*>(dest), + reinterpret_cast<const uint16_t*>(src), + chars); + } + } else { + if (sizeof(sourcechar) == 1) { + CopyCharsUnsigned(reinterpret_cast<uint16_t*>(dest), + reinterpret_cast<const uint8_t*>(src), + chars); + } else { + CopyCharsUnsigned(reinterpret_cast<uint16_t*>(dest), + reinterpret_cast<const uint16_t*>(src), + chars); + } + } +} + +template <typename sourcechar, typename sinkchar> +void CopyCharsUnsigned(sinkchar* dest, const sourcechar* src, int chars) { + sinkchar* limit = dest + chars; +#ifdef V8_HOST_CAN_READ_UNALIGNED + if (sizeof(*dest) == sizeof(*src)) { + if (chars >= static_cast<int>(OS::kMinComplexMemCopy / sizeof(*dest))) { + OS::MemCopy(dest, src, chars * sizeof(*dest)); + return; + } + // Number of characters in a uintptr_t. + static const int kStepSize = sizeof(uintptr_t) / sizeof(*dest); // NOLINT + ASSERT(dest + kStepSize > dest); // Check for overflow. + while (dest + kStepSize <= limit) { + *reinterpret_cast<uintptr_t*>(dest) = + *reinterpret_cast<const uintptr_t*>(src); + dest += kStepSize; + src += kStepSize; + } + } +#endif + while (dest < limit) { + *dest++ = static_cast<sinkchar>(*src++); + } +} + + +#if defined(V8_HOST_ARCH_ARM) +void CopyCharsUnsigned(uint8_t* dest, const uint8_t* src, int chars) { + switch (static_cast<unsigned>(chars)) { + case 0: + break; + case 1: + *dest = *src; + break; + case 2: + memcpy(dest, src, 2); + break; + case 3: + memcpy(dest, src, 3); + break; + case 4: + memcpy(dest, src, 4); + break; + case 5: + memcpy(dest, src, 5); + break; + case 6: + memcpy(dest, src, 6); + break; + case 7: + memcpy(dest, src, 7); + break; + case 8: + memcpy(dest, src, 8); + break; + case 9: + memcpy(dest, src, 9); + break; + case 10: + memcpy(dest, src, 10); + break; + case 11: + memcpy(dest, src, 11); + break; + case 12: + memcpy(dest, src, 12); + break; + case 13: + memcpy(dest, src, 13); + break; + case 14: + memcpy(dest, src, 14); + break; + case 15: + memcpy(dest, src, 15); + break; + default: + OS::MemCopy(dest, src, chars); + break; + } +} + + +void CopyCharsUnsigned(uint16_t* dest, const uint8_t* src, int chars) { + if (chars >= OS::kMinComplexConvertMemCopy) { + OS::MemCopyUint16Uint8(dest, src, chars); + } else { + OS::MemCopyUint16Uint8Wrapper(dest, src, chars); + } +} + + +void CopyCharsUnsigned(uint16_t* dest, const uint16_t* src, int chars) { + switch (static_cast<unsigned>(chars)) { + case 0: + break; + case 1: + *dest = *src; + break; + case 2: + memcpy(dest, src, 4); + break; + case 3: + memcpy(dest, src, 6); + break; + case 4: + memcpy(dest, src, 8); + break; + case 5: + memcpy(dest, src, 10); + break; + case 6: + memcpy(dest, src, 12); + break; + case 7: + memcpy(dest, src, 14); + break; + default: + OS::MemCopy(dest, src, chars * sizeof(*dest)); + break; + } +} + + +#elif defined(V8_HOST_ARCH_MIPS) +void CopyCharsUnsigned(uint8_t* dest, const uint8_t* src, int chars) { + if (chars < OS::kMinComplexMemCopy) { + memcpy(dest, src, chars); + } else { + OS::MemCopy(dest, src, chars); + } +} + +void 
CopyCharsUnsigned(uint16_t* dest, const uint16_t* src, int chars) { + if (chars < OS::kMinComplexMemCopy) { + memcpy(dest, src, chars * sizeof(*dest)); + } else { + OS::MemCopy(dest, src, chars * sizeof(*dest)); + } +} +#endif + + +class StringBuilder : public SimpleStringBuilder { + public: + explicit StringBuilder(int size) : SimpleStringBuilder(size) { } + StringBuilder(char* buffer, int size) : SimpleStringBuilder(buffer, size) { } + + // Add formatted contents to the builder just like printf(). + void AddFormatted(const char* format, ...); + + // Add formatted contents like printf based on a va_list. + void AddFormattedList(const char* format, va_list list); + private: + DISALLOW_IMPLICIT_CONSTRUCTORS(StringBuilder); +}; + + } } // namespace v8::internal #endif // V8_UTILS_H_ diff --git a/deps/v8/src/utils/random-number-generator.cc b/deps/v8/src/utils/random-number-generator.cc index d40102f91..92114b0e2 100644 --- a/deps/v8/src/utils/random-number-generator.cc +++ b/deps/v8/src/utils/random-number-generator.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "utils/random-number-generator.h" diff --git a/deps/v8/src/utils/random-number-generator.h b/deps/v8/src/utils/random-number-generator.h index cc7d7395e..331cffae7 100644 --- a/deps/v8/src/utils/random-number-generator.h +++ b/deps/v8/src/utils/random-number-generator.h @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_UTILS_RANDOM_NUMBER_GENERATOR_H_ #define V8_UTILS_RANDOM_NUMBER_GENERATOR_H_ diff --git a/deps/v8/src/v8-counters.cc b/deps/v8/src/v8-counters.cc deleted file mode 100644 index c899b289a..000000000 --- a/deps/v8/src/v8-counters.cc +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
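A note on the utils.h hunk above: CopyWords, MoveWords and CopyBytes all follow the same small-copy heuristic — spans below a threshold are copied with a plain element loop, and everything at or above it is handed to OS::MemCopy / OS::MemMove so the call and setup cost is only paid where it amortizes. A minimal, self-contained sketch of that heuristic follows; the name CopyElements and the limit of 16 are illustrative stand-ins, not V8's exact constants (V8 tunes kBlockCopyLimit and OS::kMinComplexMemCopy per platform).

#include <cstddef>
#include <cstring>

// Illustrative small-copy limit; V8 tunes its own per-platform constants.
const std::size_t kSmallCopyLimit = 16;

// Copies |count| non-overlapping elements: a plain loop for short spans,
// memcpy once the bulk routine's setup cost pays off.
template <typename T>
void CopyElements(T* dst, const T* src, std::size_t count) {
  if (count < kSmallCopyLimit) {
    for (std::size_t i = 0; i < count; ++i) dst[i] = src[i];
  } else {
    std::memcpy(dst, src, count * sizeof(T));
  }
}

MoveWords in the same hunk differs only in falling back to OS::MemMove, which tolerates overlapping source and destination ranges.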
- -#include "v8.h" - -#include "v8-counters.h" - -namespace v8 { -namespace internal { - -Counters::Counters(Isolate* isolate) { -#define HT(name, caption) \ - name##_ = HistogramTimer(#caption, 0, 10000, 50, isolate); - HISTOGRAM_TIMER_LIST(HT) -#undef HT - -#define HP(name, caption) \ - name##_ = Histogram(#caption, 0, 101, 100, isolate); - HISTOGRAM_PERCENTAGE_LIST(HP) -#undef HP - -#define HM(name, caption) \ - name##_ = Histogram(#caption, 1000, 500000, 50, isolate); - HISTOGRAM_MEMORY_LIST(HM) -#undef HM - -#define SC(name, caption) \ - name##_ = StatsCounter(isolate, "c:" #caption); - - STATS_COUNTER_LIST_1(SC) - STATS_COUNTER_LIST_2(SC) -#undef SC - -#define SC(name) \ - count_of_##name##_ = StatsCounter(isolate, "c:" "V8.CountOf_" #name); \ - size_of_##name##_ = StatsCounter(isolate, "c:" "V8.SizeOf_" #name); - INSTANCE_TYPE_LIST(SC) -#undef SC - -#define SC(name) \ - count_of_CODE_TYPE_##name##_ = \ - StatsCounter(isolate, "c:" "V8.CountOf_CODE_TYPE-" #name); \ - size_of_CODE_TYPE_##name##_ = \ - StatsCounter(isolate, "c:" "V8.SizeOf_CODE_TYPE-" #name); - CODE_KIND_LIST(SC) -#undef SC - -#define SC(name) \ - count_of_FIXED_ARRAY_##name##_ = \ - StatsCounter(isolate, "c:" "V8.CountOf_FIXED_ARRAY-" #name); \ - size_of_FIXED_ARRAY_##name##_ = \ - StatsCounter(isolate, "c:" "V8.SizeOf_FIXED_ARRAY-" #name); - FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC) -#undef SC - -#define SC(name) \ - count_of_CODE_AGE_##name##_ = \ - StatsCounter(isolate, "c:" "V8.CountOf_CODE_AGE-" #name); \ - size_of_CODE_AGE_##name##_ = \ - StatsCounter(isolate, "c:" "V8.SizeOf_CODE_AGE-" #name); - CODE_AGE_LIST_COMPLETE(SC) -#undef SC -} - - -void Counters::ResetHistograms() { -#define HT(name, caption) name##_.Reset(); - HISTOGRAM_TIMER_LIST(HT) -#undef HT - -#define HP(name, caption) name##_.Reset(); - HISTOGRAM_PERCENTAGE_LIST(HP) -#undef HP - -#define HM(name, caption) name##_.Reset(); - HISTOGRAM_MEMORY_LIST(HM) -#undef HM -} - -} } // namespace v8::internal diff --git a/deps/v8/src/v8-counters.h b/deps/v8/src/v8-counters.h deleted file mode 100644 index 0bd495510..000000000 --- a/deps/v8/src/v8-counters.h +++ /dev/null @@ -1,435 +0,0 @@ -// Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef V8_V8_COUNTERS_H_ -#define V8_V8_COUNTERS_H_ - -#include "allocation.h" -#include "counters.h" -#include "objects.h" -#include "v8globals.h" - -namespace v8 { -namespace internal { - -#define HISTOGRAM_TIMER_LIST(HT) \ - /* Garbage collection timers. */ \ - HT(gc_compactor, V8.GCCompactor) \ - HT(gc_scavenger, V8.GCScavenger) \ - HT(gc_context, V8.GCContext) /* GC context cleanup time */ \ - /* Parsing timers. */ \ - HT(parse, V8.Parse) \ - HT(parse_lazy, V8.ParseLazy) \ - HT(pre_parse, V8.PreParse) \ - /* Total compilation times. */ \ - HT(compile, V8.Compile) \ - HT(compile_eval, V8.CompileEval) \ - HT(compile_lazy, V8.CompileLazy) - -#define HISTOGRAM_PERCENTAGE_LIST(HP) \ - /* Heap fragmentation. */ \ - HP(external_fragmentation_total, \ - V8.MemoryExternalFragmentationTotal) \ - HP(external_fragmentation_old_pointer_space, \ - V8.MemoryExternalFragmentationOldPointerSpace) \ - HP(external_fragmentation_old_data_space, \ - V8.MemoryExternalFragmentationOldDataSpace) \ - HP(external_fragmentation_code_space, \ - V8.MemoryExternalFragmentationCodeSpace) \ - HP(external_fragmentation_map_space, \ - V8.MemoryExternalFragmentationMapSpace) \ - HP(external_fragmentation_cell_space, \ - V8.MemoryExternalFragmentationCellSpace) \ - HP(external_fragmentation_property_cell_space, \ - V8.MemoryExternalFragmentationPropertyCellSpace) \ - HP(external_fragmentation_lo_space, \ - V8.MemoryExternalFragmentationLoSpace) \ - /* Percentages of heap committed to each space. */ \ - HP(heap_fraction_new_space, \ - V8.MemoryHeapFractionNewSpace) \ - HP(heap_fraction_old_pointer_space, \ - V8.MemoryHeapFractionOldPointerSpace) \ - HP(heap_fraction_old_data_space, \ - V8.MemoryHeapFractionOldDataSpace) \ - HP(heap_fraction_code_space, \ - V8.MemoryHeapFractionCodeSpace) \ - HP(heap_fraction_map_space, \ - V8.MemoryHeapFractionMapSpace) \ - HP(heap_fraction_cell_space, \ - V8.MemoryHeapFractionCellSpace) \ - HP(heap_fraction_property_cell_space, \ - V8.MemoryHeapFractionPropertyCellSpace) \ - HP(heap_fraction_lo_space, \ - V8.MemoryHeapFractionLoSpace) \ - /* Percentage of crankshafted codegen. */ \ - HP(codegen_fraction_crankshaft, \ - V8.CodegenFractionCrankshaft) \ - - -#define HISTOGRAM_MEMORY_LIST(HM) \ - HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted) \ - HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \ - HM(heap_sample_map_space_committed, \ - V8.MemoryHeapSampleMapSpaceCommitted) \ - HM(heap_sample_cell_space_committed, \ - V8.MemoryHeapSampleCellSpaceCommitted) \ - HM(heap_sample_property_cell_space_committed, \ - V8.MemoryHeapSamplePropertyCellSpaceCommitted) \ - HM(heap_sample_code_space_committed, \ - V8.MemoryHeapSampleCodeSpaceCommitted) \ - HM(heap_sample_maximum_committed, \ - V8.MemoryHeapSampleMaximumCommitted) \ - - -// WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC -// Intellisense to crash. 
It was broken into two macros (each of length 40 -// lines) rather than one macro (of length about 80 lines) to work around -// this problem. Please avoid using recursive macros of this length when -// possible. -#define STATS_COUNTER_LIST_1(SC) \ - /* Global Handle Count*/ \ - SC(global_handles, V8.GlobalHandles) \ - /* OS Memory allocated */ \ - SC(memory_allocated, V8.OsMemoryAllocated) \ - SC(normalized_maps, V8.NormalizedMaps) \ - SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \ - SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \ - SC(alive_after_last_gc, V8.AliveAfterLastGC) \ - SC(objs_since_last_young, V8.ObjsSinceLastYoung) \ - SC(objs_since_last_full, V8.ObjsSinceLastFull) \ - SC(string_table_capacity, V8.StringTableCapacity) \ - SC(number_of_symbols, V8.NumberOfSymbols) \ - SC(script_wrappers, V8.ScriptWrappers) \ - SC(call_initialize_stubs, V8.CallInitializeStubs) \ - SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs) \ - SC(call_normal_stubs, V8.CallNormalStubs) \ - SC(call_megamorphic_stubs, V8.CallMegamorphicStubs) \ - SC(arguments_adaptors, V8.ArgumentsAdaptors) \ - SC(compilation_cache_hits, V8.CompilationCacheHits) \ - SC(compilation_cache_misses, V8.CompilationCacheMisses) \ - SC(string_ctor_calls, V8.StringConstructorCalls) \ - SC(string_ctor_conversions, V8.StringConstructorConversions) \ - SC(string_ctor_cached_number, V8.StringConstructorCachedNumber) \ - SC(string_ctor_string_value, V8.StringConstructorStringValue) \ - SC(string_ctor_gc_required, V8.StringConstructorGCRequired) \ - /* Amount of evaled source code. */ \ - SC(total_eval_size, V8.TotalEvalSize) \ - /* Amount of loaded source code. */ \ - SC(total_load_size, V8.TotalLoadSize) \ - /* Amount of parsed source code. */ \ - SC(total_parse_size, V8.TotalParseSize) \ - /* Amount of source code skipped over using preparsing. */ \ - SC(total_preparse_skipped, V8.TotalPreparseSkipped) \ - /* Number of symbol lookups skipped using preparsing */ \ - SC(total_preparse_symbols_skipped, V8.TotalPreparseSymbolSkipped) \ - /* Amount of compiled source code. */ \ - SC(total_compile_size, V8.TotalCompileSize) \ - /* Amount of source code compiled with the full codegen. */ \ - SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize) \ - /* Number of contexts created from scratch. */ \ - SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch) \ - /* Number of contexts created by partial snapshot. */ \ - SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot) \ - /* Number of code objects found from pc. */ \ - SC(pc_to_code, V8.PcToCode) \ - SC(pc_to_code_cached, V8.PcToCodeCached) \ - /* The store-buffer implementation of the write barrier. */ \ - SC(store_buffer_compactions, V8.StoreBufferCompactions) \ - SC(store_buffer_overflows, V8.StoreBufferOverflows) - - -#define STATS_COUNTER_LIST_2(SC) \ - /* Number of code stubs. */ \ - SC(code_stubs, V8.CodeStubs) \ - /* Amount of stub code. */ \ - SC(total_stubs_code_size, V8.TotalStubsCodeSize) \ - /* Amount of (JS) compiled code. */ \ - SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \ - SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \ - SC(gc_compactor_caused_by_promoted_data, \ - V8.GCCompactorCausedByPromotedData) \ - SC(gc_compactor_caused_by_oldspace_exhaustion, \ - V8.GCCompactorCausedByOldspaceExhaustion) \ - SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \ - SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \ - /* How is the generic keyed-load stub used? 
*/ \ - SC(keyed_load_generic_smi, V8.KeyedLoadGenericSmi) \ - SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol) \ - SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache) \ - SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow) \ - SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs) \ - SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow) \ - /* How is the generic keyed-call stub used? */ \ - SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast) \ - SC(keyed_call_generic_smi_dict, V8.KeyedCallGenericSmiDict) \ - SC(keyed_call_generic_lookup_cache, V8.KeyedCallGenericLookupCache) \ - SC(keyed_call_generic_lookup_dict, V8.KeyedCallGenericLookupDict) \ - SC(keyed_call_generic_slow, V8.KeyedCallGenericSlow) \ - SC(keyed_call_generic_slow_load, V8.KeyedCallGenericSlowLoad) \ - SC(named_load_global_stub, V8.NamedLoadGlobalStub) \ - SC(named_store_global_inline, V8.NamedStoreGlobalInline) \ - SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss) \ - SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs) \ - SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow) \ - SC(store_normal_miss, V8.StoreNormalMiss) \ - SC(store_normal_hit, V8.StoreNormalHit) \ - SC(cow_arrays_created_stub, V8.COWArraysCreatedStub) \ - SC(cow_arrays_created_runtime, V8.COWArraysCreatedRuntime) \ - SC(cow_arrays_converted, V8.COWArraysConverted) \ - SC(call_miss, V8.CallMiss) \ - SC(keyed_call_miss, V8.KeyedCallMiss) \ - SC(load_miss, V8.LoadMiss) \ - SC(keyed_load_miss, V8.KeyedLoadMiss) \ - SC(call_const, V8.CallConst) \ - SC(call_const_fast_api, V8.CallConstFastApi) \ - SC(call_const_interceptor, V8.CallConstInterceptor) \ - SC(call_const_interceptor_fast_api, V8.CallConstInterceptorFastApi) \ - SC(call_global_inline, V8.CallGlobalInline) \ - SC(call_global_inline_miss, V8.CallGlobalInlineMiss) \ - SC(constructed_objects, V8.ConstructedObjects) \ - SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \ - SC(negative_lookups, V8.NegativeLookups) \ - SC(negative_lookups_miss, V8.NegativeLookupsMiss) \ - SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \ - SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \ - SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \ - SC(array_function_runtime, V8.ArrayFunctionRuntime) \ - SC(array_function_native, V8.ArrayFunctionNative) \ - SC(for_in, V8.ForIn) \ - SC(enum_cache_hits, V8.EnumCacheHits) \ - SC(enum_cache_misses, V8.EnumCacheMisses) \ - SC(zone_segment_bytes, V8.ZoneSegmentBytes) \ - SC(fast_new_closure_total, V8.FastNewClosureTotal) \ - SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized) \ - SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized) \ - SC(string_add_runtime, V8.StringAddRuntime) \ - SC(string_add_native, V8.StringAddNative) \ - SC(string_add_runtime_ext_to_ascii, V8.StringAddRuntimeExtToAscii) \ - SC(sub_string_runtime, V8.SubStringRuntime) \ - SC(sub_string_native, V8.SubStringNative) \ - SC(string_add_make_two_char, V8.StringAddMakeTwoChar) \ - SC(string_compare_native, V8.StringCompareNative) \ - SC(string_compare_runtime, V8.StringCompareRuntime) \ - SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \ - SC(regexp_entry_native, V8.RegExpEntryNative) \ - SC(number_to_string_native, V8.NumberToStringNative) \ - SC(number_to_string_runtime, V8.NumberToStringRuntime) \ - SC(math_acos, V8.MathAcos) \ - SC(math_asin, V8.MathAsin) \ - SC(math_atan, V8.MathAtan) \ - SC(math_atan2, V8.MathAtan2) \ - 
SC(math_exp, V8.MathExp) \ - SC(math_floor, V8.MathFloor) \ - SC(math_log, V8.MathLog) \ - SC(math_pow, V8.MathPow) \ - SC(math_round, V8.MathRound) \ - SC(math_sqrt, V8.MathSqrt) \ - SC(stack_interrupts, V8.StackInterrupts) \ - SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \ - SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \ - SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \ - SC(soft_deopts_requested, V8.SoftDeoptsRequested) \ - SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \ - SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \ - /* Number of write barriers in generated code. */ \ - SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \ - SC(write_barriers_static, V8.WriteBarriersStatic) \ - SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \ - SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \ - SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \ - SC(old_pointer_space_bytes_available, \ - V8.MemoryOldPointerSpaceBytesAvailable) \ - SC(old_pointer_space_bytes_committed, \ - V8.MemoryOldPointerSpaceBytesCommitted) \ - SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed) \ - SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable) \ - SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted) \ - SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed) \ - SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \ - SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \ - SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \ - SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \ - SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \ - SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \ - SC(cell_space_bytes_available, V8.MemoryCellSpaceBytesAvailable) \ - SC(cell_space_bytes_committed, V8.MemoryCellSpaceBytesCommitted) \ - SC(cell_space_bytes_used, V8.MemoryCellSpaceBytesUsed) \ - SC(property_cell_space_bytes_available, \ - V8.MemoryPropertyCellSpaceBytesAvailable) \ - SC(property_cell_space_bytes_committed, \ - V8.MemoryPropertyCellSpaceBytesCommitted) \ - SC(property_cell_space_bytes_used, \ - V8.MemoryPropertyCellSpaceBytesUsed) \ - SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \ - SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \ - SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed) - - -// This file contains all the v8 counters that are in use. 
-class Counters { - public: -#define HT(name, caption) \ - HistogramTimer* name() { return &name##_; } - HISTOGRAM_TIMER_LIST(HT) -#undef HT - -#define HP(name, caption) \ - Histogram* name() { return &name##_; } - HISTOGRAM_PERCENTAGE_LIST(HP) -#undef HP - -#define HM(name, caption) \ - Histogram* name() { return &name##_; } - HISTOGRAM_MEMORY_LIST(HM) -#undef HM - -#define SC(name, caption) \ - StatsCounter* name() { return &name##_; } - STATS_COUNTER_LIST_1(SC) - STATS_COUNTER_LIST_2(SC) -#undef SC - -#define SC(name) \ - StatsCounter* count_of_##name() { return &count_of_##name##_; } \ - StatsCounter* size_of_##name() { return &size_of_##name##_; } - INSTANCE_TYPE_LIST(SC) -#undef SC - -#define SC(name) \ - StatsCounter* count_of_CODE_TYPE_##name() \ - { return &count_of_CODE_TYPE_##name##_; } \ - StatsCounter* size_of_CODE_TYPE_##name() \ - { return &size_of_CODE_TYPE_##name##_; } - CODE_KIND_LIST(SC) -#undef SC - -#define SC(name) \ - StatsCounter* count_of_FIXED_ARRAY_##name() \ - { return &count_of_FIXED_ARRAY_##name##_; } \ - StatsCounter* size_of_FIXED_ARRAY_##name() \ - { return &size_of_FIXED_ARRAY_##name##_; } - FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC) -#undef SC - -#define SC(name) \ - StatsCounter* count_of_CODE_AGE_##name() \ - { return &count_of_CODE_AGE_##name##_; } \ - StatsCounter* size_of_CODE_AGE_##name() \ - { return &size_of_CODE_AGE_##name##_; } - CODE_AGE_LIST_COMPLETE(SC) -#undef SC - - enum Id { -#define RATE_ID(name, caption) k_##name, - HISTOGRAM_TIMER_LIST(RATE_ID) -#undef RATE_ID -#define PERCENTAGE_ID(name, caption) k_##name, - HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID) -#undef PERCENTAGE_ID -#define MEMORY_ID(name, caption) k_##name, - HISTOGRAM_MEMORY_LIST(MEMORY_ID) -#undef MEMORY_ID -#define COUNTER_ID(name, caption) k_##name, - STATS_COUNTER_LIST_1(COUNTER_ID) - STATS_COUNTER_LIST_2(COUNTER_ID) -#undef COUNTER_ID -#define COUNTER_ID(name) kCountOf##name, kSizeOf##name, - INSTANCE_TYPE_LIST(COUNTER_ID) -#undef COUNTER_ID -#define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \ - kSizeOfCODE_TYPE_##name, - CODE_KIND_LIST(COUNTER_ID) -#undef COUNTER_ID -#define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \ - kSizeOfFIXED_ARRAY__##name, - FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID) -#undef COUNTER_ID -#define COUNTER_ID(name) kCountOfCODE_AGE__##name, \ - kSizeOfCODE_AGE__##name, - CODE_AGE_LIST_COMPLETE(COUNTER_ID) -#undef COUNTER_ID - stats_counter_count - }; - - void ResetHistograms(); - - private: -#define HT(name, caption) \ - HistogramTimer name##_; - HISTOGRAM_TIMER_LIST(HT) -#undef HT - -#define HP(name, caption) \ - Histogram name##_; - HISTOGRAM_PERCENTAGE_LIST(HP) -#undef HP - -#define HM(name, caption) \ - Histogram name##_; - HISTOGRAM_MEMORY_LIST(HM) -#undef HM - -#define SC(name, caption) \ - StatsCounter name##_; - STATS_COUNTER_LIST_1(SC) - STATS_COUNTER_LIST_2(SC) -#undef SC - -#define SC(name) \ - StatsCounter size_of_##name##_; \ - StatsCounter count_of_##name##_; - INSTANCE_TYPE_LIST(SC) -#undef SC - -#define SC(name) \ - StatsCounter size_of_CODE_TYPE_##name##_; \ - StatsCounter count_of_CODE_TYPE_##name##_; - CODE_KIND_LIST(SC) -#undef SC - -#define SC(name) \ - StatsCounter size_of_FIXED_ARRAY_##name##_; \ - StatsCounter count_of_FIXED_ARRAY_##name##_; - FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC) -#undef SC - -#define SC(name) \ - StatsCounter size_of_CODE_AGE_##name##_; \ - StatsCounter count_of_CODE_AGE_##name##_; - CODE_AGE_LIST_COMPLETE(SC) -#undef SC - - friend class Isolate; - - explicit Counters(Isolate* isolate); - - 
DISALLOW_IMPLICIT_CONSTRUCTORS(Counters); -}; - -} } // namespace v8::internal - -#endif // V8_V8_COUNTERS_H_ diff --git a/deps/v8/src/v8.cc b/deps/v8/src/v8.cc index b49e0eb5f..f8156ecbd 100644 --- a/deps/v8/src/v8.cc +++ b/deps/v8/src/v8.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -53,29 +30,14 @@ namespace internal { V8_DECLARE_ONCE(init_once); -List<CallCompletedCallback>* V8::call_completed_callbacks_ = NULL; v8::ArrayBuffer::Allocator* V8::array_buffer_allocator_ = NULL; v8::Platform* V8::platform_ = NULL; bool V8::Initialize(Deserializer* des) { InitializeOncePerProcess(); - - // The current thread may not yet had entered an isolate to run. - // Note the Isolate::Current() may be non-null because for various - // initialization purposes an initializing thread may be assigned an isolate - // but not actually enter it. 
- if (i::Isolate::CurrentPerIsolateThreadData() == NULL) { - i::Isolate::EnterDefaultIsolate(); - } - - ASSERT(i::Isolate::CurrentPerIsolateThreadData() != NULL); - ASSERT(i::Isolate::CurrentPerIsolateThreadData()->thread_id().Equals( - i::ThreadId::Current())); - ASSERT(i::Isolate::CurrentPerIsolateThreadData()->isolate() == - i::Isolate::Current()); - - Isolate* isolate = Isolate::Current(); + Isolate* isolate = Isolate::UncheckedCurrent(); + if (isolate == NULL) return true; if (isolate->IsDead()) return false; if (isolate->IsInitialized()) return true; @@ -108,10 +70,8 @@ void V8::TearDown() { RegisteredExtension::UnregisterAll(); Isolate::GlobalTearDown(); - delete call_completed_callbacks_; - call_completed_callbacks_ = NULL; - Sampler::TearDown(); + Serializer::TearDown(); #ifdef V8_USE_DEFAULT_PLATFORM DefaultPlatform* platform = static_cast<DefaultPlatform*>(platform_); @@ -127,65 +87,9 @@ void V8::SetReturnAddressLocationResolver( } -void V8::AddCallCompletedCallback(CallCompletedCallback callback) { - if (call_completed_callbacks_ == NULL) { // Lazy init. - call_completed_callbacks_ = new List<CallCompletedCallback>(); - } - for (int i = 0; i < call_completed_callbacks_->length(); i++) { - if (callback == call_completed_callbacks_->at(i)) return; - } - call_completed_callbacks_->Add(callback); -} - - -void V8::RemoveCallCompletedCallback(CallCompletedCallback callback) { - if (call_completed_callbacks_ == NULL) return; - for (int i = 0; i < call_completed_callbacks_->length(); i++) { - if (callback == call_completed_callbacks_->at(i)) { - call_completed_callbacks_->Remove(i); - } - } -} - - -void V8::FireCallCompletedCallback(Isolate* isolate) { - bool has_call_completed_callbacks = call_completed_callbacks_ != NULL; - bool run_microtasks = isolate->autorun_microtasks() && - isolate->microtask_pending(); - if (!has_call_completed_callbacks && !run_microtasks) return; - - HandleScopeImplementer* handle_scope_implementer = - isolate->handle_scope_implementer(); - if (!handle_scope_implementer->CallDepthIsZero()) return; - // Fire callbacks. Increase call depth to prevent recursive callbacks. - handle_scope_implementer->IncrementCallDepth(); - if (run_microtasks) Execution::RunMicrotasks(isolate); - if (has_call_completed_callbacks) { - for (int i = 0; i < call_completed_callbacks_->length(); i++) { - call_completed_callbacks_->at(i)(); - } - } - handle_scope_implementer->DecrementCallDepth(); -} - - -void V8::RunMicrotasks(Isolate* isolate) { - if (!isolate->microtask_pending()) - return; - - HandleScopeImplementer* handle_scope_implementer = - isolate->handle_scope_implementer(); - ASSERT(handle_scope_implementer->CallDepthIsZero()); - - // Increase call depth to prevent recursive callbacks. - handle_scope_implementer->IncrementCallDepth(); - Execution::RunMicrotasks(isolate); - handle_scope_implementer->DecrementCallDepth(); -} - - void V8::InitializeOncePerProcessImpl() { FlagList::EnforceFlagImplications(); + Serializer::InitializeOncePerProcess(); if (FLAG_predictable && FLAG_random_seed == 0) { // Avoid random seeds in predictable mode. @@ -195,15 +99,23 @@ void V8::InitializeOncePerProcessImpl() { if (FLAG_stress_compaction) { FLAG_force_marking_deque_overflows = true; FLAG_gc_global = true; - FLAG_max_new_space_size = (1 << (kPageSizeBits - 10)) * 2; + FLAG_max_new_space_size = 2 * Page::kPageSize; } #ifdef V8_USE_DEFAULT_PLATFORM platform_ = new DefaultPlatform; #endif Sampler::SetUp(); - CPU::SetUp(); + // TODO(svenpanne) Clean this up when Serializer is a real object. 
+ bool serializer_enabled = Serializer::enabled(NULL); + CpuFeatures::Probe(serializer_enabled); OS::PostSetUp(); + // The custom exp implementation needs 16KB of lookup data; initialize it + // on demand. + init_fast_sqrt_function(); +#ifdef _WIN64 + init_modulo_function(); +#endif ElementsAccessor::InitializeOncePerProcess(); LOperand::SetUpCaches(); SetUpJSCallerSavedCodeData(); diff --git a/deps/v8/src/v8.h b/deps/v8/src/v8.h index d3f5a9c83..f019e68b7 100644 --- a/deps/v8/src/v8.h +++ b/deps/v8/src/v8.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // // Top include for all V8 .cc files. @@ -55,7 +32,7 @@ #include "v8checks.h" #include "allocation.h" #include "assert-scope.h" -#include "v8utils.h" +#include "utils.h" #include "flags.h" // Objects & heap @@ -66,6 +43,7 @@ #include "mark-compact-inl.h" #include "log-inl.h" #include "handles-inl.h" +#include "types-inl.h" #include "zone-inl.h" namespace v8 { @@ -97,12 +75,6 @@ class V8 : public AllStatic { // Support for entry hooking JITed code. static void SetFunctionEntryHook(FunctionEntryHook entry_hook); - static void AddCallCompletedCallback(CallCompletedCallback callback); - static void RemoveCallCompletedCallback(CallCompletedCallback callback); - static void FireCallCompletedCallback(Isolate* isolate); - - static void RunMicrotasks(Isolate* isolate); - static v8::ArrayBuffer::Allocator* ArrayBufferAllocator() { return array_buffer_allocator_; } @@ -120,8 +92,6 @@ class V8 : public AllStatic { static void InitializeOncePerProcessImpl(); static void InitializeOncePerProcess(); - // List of callbacks when a Call completes. - static List<CallCompletedCallback>* call_completed_callbacks_; // Allocator for external array buffers. static v8::ArrayBuffer::Allocator* array_buffer_allocator_; // v8::Platform to use. 
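The v8.cc and v8.h hunks above remove the process-wide list of call-completed callbacks and the static RunMicrotasks/FireCallCompletedCallback entry points from class V8; presumably the equivalent logic lives per-isolate after this upgrade. The deleted FireCallCompletedCallback still illustrates a reentrancy guard worth noting: callbacks run only once the embedder's call depth has unwound to zero, and the depth is raised while they run so a callback that re-enters the engine cannot re-trigger the dispatch. A rough standalone sketch of that guard — Dispatcher, Callback and call_depth_ are illustrative names, not V8 API:

#include <vector>

// Completion callbacks fire only at depth zero; the depth is incremented
// while they run so recursive firing is impossible.
class Dispatcher {
 public:
  typedef void (*Callback)();

  void AddCallback(Callback cb) { callbacks_.push_back(cb); }

  void IncrementCallDepth() { ++call_depth_; }
  void DecrementCallDepth() { --call_depth_; }

  // Invoked when an API call finishes unwinding.
  void FireCallCompleted() {
    if (call_depth_ != 0) return;  // Still inside a nested call.
    IncrementCallDepth();          // Block recursive firing from callbacks.
    for (Callback cb : callbacks_) cb();
    DecrementCallDepth();
  }

 private:
  std::vector<Callback> callbacks_;
  int call_depth_ = 0;
};

The removed RunMicrotasks wraps Execution::RunMicrotasks in the same increment/decrement bracket for the same reason.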
@@ -135,6 +105,4 @@ enum NilValue { kNullValue, kUndefinedValue }; } } // namespace v8::internal -namespace i = v8::internal; - #endif // V8_V8_H_ diff --git a/deps/v8/src/v8checks.h b/deps/v8/src/v8checks.h index 76e16fd24..79a308868 100644 --- a/deps/v8/src/v8checks.h +++ b/deps/v8/src/v8checks.h @@ -1,29 +1,6 @@ // Copyright 2006-2008 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_V8CHECKS_H_ #define V8_V8CHECKS_H_ diff --git a/deps/v8/src/v8conversions.cc b/deps/v8/src/v8conversions.cc deleted file mode 100644 index 900b62d10..000000000 --- a/deps/v8/src/v8conversions.cc +++ /dev/null @@ -1,132 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include <stdarg.h> -#include <limits.h> - -#include "v8.h" - -#include "conversions-inl.h" -#include "v8conversions.h" -#include "dtoa.h" -#include "factory.h" -#include "strtod.h" - -namespace v8 { -namespace internal { - -namespace { - -// C++-style iterator adaptor for StringCharacterStream -// (unlike C++ iterators the end-marker has different type). -class StringCharacterStreamIterator { - public: - class EndMarker {}; - - explicit StringCharacterStreamIterator(StringCharacterStream* stream); - - uint16_t operator*() const; - void operator++(); - bool operator==(EndMarker const&) const { return end_; } - bool operator!=(EndMarker const& m) const { return !end_; } - - private: - StringCharacterStream* const stream_; - uint16_t current_; - bool end_; -}; - - -StringCharacterStreamIterator::StringCharacterStreamIterator( - StringCharacterStream* stream) : stream_(stream) { - ++(*this); -} - -uint16_t StringCharacterStreamIterator::operator*() const { - return current_; -} - - -void StringCharacterStreamIterator::operator++() { - end_ = !stream_->HasMore(); - if (!end_) { - current_ = stream_->GetNext(); - } -} -} // End anonymous namespace. - - -double StringToDouble(UnicodeCache* unicode_cache, - String* str, int flags, double empty_string_val) { - StringShape shape(str); - // TODO(dcarney): Use a Visitor here. - if (shape.IsSequentialAscii()) { - const uint8_t* begin = SeqOneByteString::cast(str)->GetChars(); - const uint8_t* end = begin + str->length(); - return InternalStringToDouble(unicode_cache, begin, end, flags, - empty_string_val); - } else if (shape.IsSequentialTwoByte()) { - const uc16* begin = SeqTwoByteString::cast(str)->GetChars(); - const uc16* end = begin + str->length(); - return InternalStringToDouble(unicode_cache, begin, end, flags, - empty_string_val); - } else { - ConsStringIteratorOp op; - StringCharacterStream stream(str, &op); - return InternalStringToDouble(unicode_cache, - StringCharacterStreamIterator(&stream), - StringCharacterStreamIterator::EndMarker(), - flags, - empty_string_val); - } -} - - -double StringToInt(UnicodeCache* unicode_cache, - String* str, - int radix) { - StringShape shape(str); - // TODO(dcarney): Use a Visitor here. 
- if (shape.IsSequentialAscii()) { - const uint8_t* begin = SeqOneByteString::cast(str)->GetChars(); - const uint8_t* end = begin + str->length(); - return InternalStringToInt(unicode_cache, begin, end, radix); - } else if (shape.IsSequentialTwoByte()) { - const uc16* begin = SeqTwoByteString::cast(str)->GetChars(); - const uc16* end = begin + str->length(); - return InternalStringToInt(unicode_cache, begin, end, radix); - } else { - ConsStringIteratorOp op; - StringCharacterStream stream(str, &op); - return InternalStringToInt(unicode_cache, - StringCharacterStreamIterator(&stream), - StringCharacterStreamIterator::EndMarker(), - radix); - } -} - -} } // namespace v8::internal diff --git a/deps/v8/src/v8conversions.h b/deps/v8/src/v8conversions.h deleted file mode 100644 index f2568c066..000000000 --- a/deps/v8/src/v8conversions.h +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef V8_V8CONVERSIONS_H_ -#define V8_V8CONVERSIONS_H_ - -#include "conversions.h" - -namespace v8 { -namespace internal { - - -static inline bool IsMinusZero(double value) { - static const DoubleRepresentation minus_zero(-0.0); - return DoubleRepresentation(value) == minus_zero; -} - - -// Integer32 is an integer that can be represented as a signed 32-bit -// integer. It has to be in the range [-2^31, 2^31 - 1]. -// We also have to check for negative 0 as it is not an Integer32. -static inline bool IsInt32Double(double value) { - return !IsMinusZero(value) && - value >= kMinInt && - value <= kMaxInt && - value == FastI2D(FastD2I(value)); -} - - -// Convert from Number object to C integer. 
-inline int32_t NumberToInt32(Object* number) { - if (number->IsSmi()) return Smi::cast(number)->value(); - return DoubleToInt32(number->Number()); -} - - -inline uint32_t NumberToUint32(Object* number) { - if (number->IsSmi()) return Smi::cast(number)->value(); - return DoubleToUint32(number->Number()); -} - - -// Converts a string into a double value according to ECMA-262 9.3.1 -double StringToDouble(UnicodeCache* unicode_cache, - String* str, - int flags, - double empty_string_val = 0); - -// Converts a string into an integer. -double StringToInt(UnicodeCache* unicode_cache, String* str, int radix); - -inline bool TryNumberToSize(Isolate* isolate, - Object* number, size_t* result) { - SealHandleScope shs(isolate); - if (number->IsSmi()) { - int value = Smi::cast(number)->value(); - ASSERT( - static_cast<unsigned>(Smi::kMaxValue) - <= std::numeric_limits<size_t>::max()); - if (value >= 0) { - *result = static_cast<size_t>(value); - return true; - } - return false; - } else { - ASSERT(number->IsHeapNumber()); - double value = HeapNumber::cast(number)->value(); - if (value >= 0 && - value <= std::numeric_limits<size_t>::max()) { - *result = static_cast<size_t>(value); - return true; - } else { - return false; - } - } -} - -// Converts a number into size_t. -inline size_t NumberToSize(Isolate* isolate, - Object* number) { - size_t result = 0; - bool is_valid = TryNumberToSize(isolate, number, &result); - CHECK(is_valid); - return result; -} - -} } // namespace v8::internal - -#endif // V8_V8CONVERSIONS_H_ diff --git a/deps/v8/src/v8dll-main.cc b/deps/v8/src/v8dll-main.cc index 7f6c9f955..dd0856902 100644 --- a/deps/v8/src/v8dll-main.cc +++ b/deps/v8/src/v8dll-main.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // The GYP based build ends up defining USING_V8_SHARED when compiling this // file. 
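The deleted v8conversions.h above ends with TryNumberToSize and NumberToSize: a Smi is accepted only when non-negative, a HeapNumber only when its double value fits into size_t, and NumberToSize turns any out-of-range input into a CHECK failure rather than a silent truncation. The essential idea is a range-checked narrowing conversion; here is a freestanding sketch using plain doubles (no V8 types, names are illustrative):

#include <cstddef>
#include <cstdint>
#include <limits>

// Range-checked double -> size_t conversion: rejects NaN, negatives and
// values too large to represent, instead of truncating silently.
bool TryDoubleToSize(double value, size_t* result) {
  if (!(value >= 0.0)) return false;                 // NaN and negatives.
  const double kTwoPow64 = 18446744073709551616.0;   // 2^64 as a double.
  if (value >= kTwoPow64) return false;              // Too big for uint64_t.
  uint64_t wide = static_cast<uint64_t>(value);      // Well-defined: value < 2^64.
  if (wide > std::numeric_limits<size_t>::max()) return false;
  *result = static_cast<size_t>(wide);
  return true;
}

The deleted header performs the analogous checks — non-negativity on the Smi fast path, an upper bound of std::numeric_limits<size_t>::max() on the heap-number path — before casting.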
diff --git a/deps/v8/src/v8globals.h b/deps/v8/src/v8globals.h index e6cd94df2..b9ca952e7 100644 --- a/deps/v8/src/v8globals.h +++ b/deps/v8/src/v8globals.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_V8GLOBALS_H_ #define V8_V8GLOBALS_H_ @@ -59,6 +36,7 @@ const intptr_t kCodeAlignment = 1 << kCodeAlignmentBits; const intptr_t kCodeAlignmentMask = kCodeAlignment - 1; // Tag information for Failure. +// TODO(yangguo): remove this from space owner calculation. const int kFailureTag = 3; const int kFailureTagSize = 2; const intptr_t kFailureTagMask = (1 << kFailureTagSize) - 1; @@ -128,6 +106,7 @@ class MemoryChunk; class SeededNumberDictionary; class UnseededNumberDictionary; class NameDictionary; +template <typename T> class MaybeHandle; template <typename T> class Handle; class Heap; class HeapObject; @@ -146,7 +125,6 @@ class MapSpace; class MarkCompactCollector; class NewSpace; class Object; -class MaybeObject; class OldSpace; class Foreign; class Scope; @@ -184,6 +162,7 @@ enum AllocationSpace { CELL_SPACE, // Only and all cell objects. PROPERTY_CELL_SPACE, // Only and all global property cell objects. LO_SPACE, // Promoted large objects. + INVALID_SPACE, // Only used in AllocationResult to signal success. FIRST_SPACE = NEW_SPACE, LAST_SPACE = LO_SPACE, @@ -282,8 +261,6 @@ enum InlineCacheState { enum CallFunctionFlags { NO_CALL_FUNCTION_FLAGS, - // The call target is cached in the instruction stream. - RECORD_CALL_TARGET, CALL_AS_METHOD, // Always wrap the receiver and call to the JSFunction. Only use this flag // both the receiver type and the target method are statically known. @@ -291,6 +268,13 @@ enum CallFunctionFlags { }; +enum CallConstructorFlags { + NO_CALL_CONSTRUCTOR_FLAGS, + // The call target is cached in the instruction stream. + RECORD_CONSTRUCTOR_TARGET +}; + + enum InlineCacheHolderFlag { OWN_MAP, // For fast properties objects. 
PROTOTYPE_MAP // For slow properties objects (except GlobalObjects). @@ -348,8 +332,8 @@ union IeeeDoubleBigEndianArchType { // AccessorCallback struct AccessorDescriptor { - MaybeObject* (*getter)(Isolate* isolate, Object* object, void* data); - MaybeObject* (*setter)( + Object* (*getter)(Isolate* isolate, Object* object, void* data); + Object* (*setter)( Isolate* isolate, JSObject* object, Object* value, void* data); void* data; }; @@ -565,4 +549,6 @@ enum MinusZeroMode { } } // namespace v8::internal +namespace i = v8::internal; + #endif // V8_V8GLOBALS_H_ diff --git a/deps/v8/src/v8memory.h b/deps/v8/src/v8memory.h index c72ce7ab7..615ec4fe8 100644 --- a/deps/v8/src/v8memory.h +++ b/deps/v8/src/v8memory.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_MEMORY_H_ #define V8_MEMORY_H_ diff --git a/deps/v8/src/v8natives.js b/deps/v8/src/v8natives.js index f183afb96..cd60cb40d 100644 --- a/deps/v8/src/v8natives.js +++ b/deps/v8/src/v8natives.js @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // This file relies on the fact that the following declarations have been made // in runtime.js: @@ -1893,14 +1870,6 @@ SetUpFunction(); // Eventually, we should move to a real event queue that allows to maintain // relative ordering of different kinds of tasks. -function GetMicrotaskQueue() { - var microtaskState = %GetMicrotaskState(); - if (IS_UNDEFINED(microtaskState.queue)) { - microtaskState.queue = new InternalArray; - } - return microtaskState.queue; -} - function RunMicrotasks() { while (%SetMicrotaskPending(false)) { var microtaskState = %GetMicrotaskState(); @@ -1908,7 +1877,7 @@ function RunMicrotasks() { return; var microtasks = microtaskState.queue; - microtaskState.queue = new InternalArray; + microtaskState.queue = null; for (var i = 0; i < microtasks.length; i++) { microtasks[i](); @@ -1916,7 +1885,11 @@ function RunMicrotasks() { } } -function EnqueueExternalMicrotask(fn) { - GetMicrotaskQueue().push(fn); +function EnqueueMicrotask(fn) { + var microtaskState = %GetMicrotaskState(); + if (IS_UNDEFINED(microtaskState.queue) || IS_NULL(microtaskState.queue)) { + microtaskState.queue = new InternalArray; + } + microtaskState.queue.push(fn); %SetMicrotaskPending(true); } diff --git a/deps/v8/src/v8threads.cc b/deps/v8/src/v8threads.cc index 1de9d4fd7..410d0b133 100644 --- a/deps/v8/src/v8threads.cc +++ b/deps/v8/src/v8threads.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
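[editor's note] The v8natives.js hunk above drops GetMicrotaskQueue: EnqueueMicrotask now allocates the queue lazily on first use, and RunMicrotasks detaches the queue (sets it to null) before draining it, so re-entrant enqueues land in a fresh queue. A rough C++ analogue of that enqueue/run shape, with std::vector and std::function standing in for InternalArray and the %-runtime calls (all names below are illustrative):

#include <cstdio>
#include <functional>
#include <memory>
#include <utility>
#include <vector>

struct MicrotaskState {
  std::unique_ptr<std::vector<std::function<void()>>> queue;  // lazily allocated
  bool pending = false;
};

void EnqueueMicrotask(MicrotaskState* state, std::function<void()> fn) {
  if (!state->queue)  // first enqueue, or the run loop cleared the queue
    state->queue.reset(new std::vector<std::function<void()>>());
  state->queue->push_back(std::move(fn));
  state->pending = true;
}

void RunMicrotasks(MicrotaskState* state) {
  while (state->pending) {        // mirrors while (%SetMicrotaskPending(false))
    state->pending = false;
    if (!state->queue) return;
    auto tasks = std::move(state->queue);  // detach; re-entrant enqueues start fresh
    for (auto& task : *tasks) task();
  }
}

int main() {
  MicrotaskState state;
  EnqueueMicrotask(&state, [&] {
    std::puts("first");
    EnqueueMicrotask(&state, [] { std::puts("nested"); });  // re-entrant enqueue
  });
  EnqueueMicrotask(&state, [] { std::puts("second"); });
  RunMicrotasks(&state);  // prints: first, second, nested
  return 0;
}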
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -74,10 +51,6 @@ void Locker::Initialize(v8::Isolate* isolate) { isolate_->stack_guard()->ClearThread(access); isolate_->stack_guard()->InitThread(access); } - if (isolate_->IsDefaultIsolate()) { - // This only enters if not yet entered. - internal::Isolate::EnterDefaultIsolate(); - } } ASSERT(isolate_->thread_manager()->IsLockedByCurrentThread()); } @@ -98,9 +71,6 @@ bool Locker::IsActive() { Locker::~Locker() { ASSERT(isolate_->thread_manager()->IsLockedByCurrentThread()); if (has_lock_) { - if (isolate_->IsDefaultIsolate()) { - isolate_->Exit(); - } if (top_level_) { isolate_->thread_manager()->FreeThreadResources(); } else { @@ -115,9 +85,6 @@ void Unlocker::Initialize(v8::Isolate* isolate) { ASSERT(isolate != NULL); isolate_ = reinterpret_cast<i::Isolate*>(isolate); ASSERT(isolate_->thread_manager()->IsLockedByCurrentThread()); - if (isolate_->IsDefaultIsolate()) { - isolate_->Exit(); - } isolate_->thread_manager()->ArchiveThread(); isolate_->thread_manager()->Unlock(); } @@ -127,9 +94,6 @@ Unlocker::~Unlocker() { ASSERT(!isolate_->thread_manager()->IsLockedByCurrentThread()); isolate_->thread_manager()->Lock(); isolate_->thread_manager()->RestoreThread(); - if (isolate_->IsDefaultIsolate()) { - isolate_->Enter(); - } } @@ -175,9 +139,7 @@ bool ThreadManager::RestoreThread() { from = isolate_->handle_scope_implementer()->RestoreThread(from); from = isolate_->RestoreThread(from); from = Relocatable::RestoreState(isolate_, from); -#ifdef ENABLE_DEBUGGER_SUPPORT from = isolate_->debug()->RestoreDebug(from); -#endif from = isolate_->stack_guard()->RestoreStackGuard(from); from = isolate_->regexp_stack()->RestoreStack(from); from = isolate_->bootstrapper()->RestoreState(from); @@ -209,9 +171,7 @@ void ThreadManager::Unlock() { static int ArchiveSpacePerThread() { return HandleScopeImplementer::ArchiveSpacePerThread() + Isolate::ArchiveSpacePerThread() + -#ifdef ENABLE_DEBUGGER_SUPPORT Debug::ArchiveSpacePerThread() + -#endif StackGuard::ArchiveSpacePerThread() + RegExpStack::ArchiveSpacePerThread() + Bootstrapper::ArchiveSpacePerThread() + @@ -337,9 +297,7 @@ void ThreadManager::EagerlyArchiveThread() { to = isolate_->handle_scope_implementer()->ArchiveThread(to); to = isolate_->ArchiveThread(to); to = Relocatable::ArchiveState(isolate_, to); -#ifdef ENABLE_DEBUGGER_SUPPORT to = isolate_->debug()->ArchiveDebug(to); -#endif to = isolate_->stack_guard()->ArchiveStackGuard(to); to = isolate_->regexp_stack()->ArchiveStack(to); to = isolate_->bootstrapper()->ArchiveState(to); @@ -351,9 +309,7 @@ void ThreadManager::EagerlyArchiveThread() { void ThreadManager::FreeThreadResources() { isolate_->handle_scope_implementer()->FreeThreadResources(); isolate_->FreeThreadResources(); -#ifdef ENABLE_DEBUGGER_SUPPORT isolate_->debug()->FreeThreadResources(); -#endif isolate_->stack_guard()->FreeThreadResources(); 
isolate_->regexp_stack()->FreeThreadResources(); isolate_->bootstrapper()->FreeThreadResources(); diff --git a/deps/v8/src/v8threads.h b/deps/v8/src/v8threads.h index a20700a5c..ca722adc6 100644 --- a/deps/v8/src/v8threads.h +++ b/deps/v8/src/v8threads.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_V8THREADS_H_ #define V8_V8THREADS_H_ diff --git a/deps/v8/src/v8utils.cc b/deps/v8/src/v8utils.cc deleted file mode 100644 index 7390d854e..000000000 --- a/deps/v8/src/v8utils.cc +++ /dev/null @@ -1,276 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
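[editor's note] The v8threads.cc changes above remove the default-isolate special casing from Locker/Unlocker and drop the ENABLE_DEBUGGER_SUPPORT guards, so per-thread debug state is now always archived and restored alongside the rest. As an illustration of the Locker/Unlocker shape only, here is a generic RAII sketch built on std::mutex; it is not V8's thread manager, and the archive/restore hooks are empty stand-ins:

#include <mutex>

// Stand-ins for the per-thread archive/restore work the thread manager does.
void ArchiveThreadState() {}
void RestoreThreadState() {}

class Locker {  // takes the lock and restores this thread's archived state
 public:
  explicit Locker(std::mutex& mu) : mu_(mu) {
    mu_.lock();
    RestoreThreadState();
  }
  ~Locker() {  // archives state before giving the lock up
    ArchiveThreadState();
    mu_.unlock();
  }
 private:
  std::mutex& mu_;
};

class Unlocker {  // temporarily releases the lock inside a Locker scope
 public:
  explicit Unlocker(std::mutex& mu) : mu_(mu) {
    ArchiveThreadState();
    mu_.unlock();
  }
  ~Unlocker() {
    mu_.lock();
    RestoreThreadState();
  }
 private:
  std::mutex& mu_;
};

int main() {
  std::mutex isolate_mutex;
  Locker lock(isolate_mutex);
  {
    Unlocker unlock(isolate_mutex);  // another thread could enter here
  }
  return 0;                          // ~Locker archives and unlocks
}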
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include <stdarg.h> - -#include "v8.h" - -#include "platform.h" - -#include "sys/stat.h" - -namespace v8 { -namespace internal { - - -void PrintF(const char* format, ...) { - va_list arguments; - va_start(arguments, format); - OS::VPrint(format, arguments); - va_end(arguments); -} - - -void PrintF(FILE* out, const char* format, ...) { - va_list arguments; - va_start(arguments, format); - OS::VFPrint(out, format, arguments); - va_end(arguments); -} - - -void PrintPID(const char* format, ...) { - OS::Print("[%d] ", OS::GetCurrentProcessId()); - va_list arguments; - va_start(arguments, format); - OS::VPrint(format, arguments); - va_end(arguments); -} - - -void Flush(FILE* out) { - fflush(out); -} - - -char* ReadLine(const char* prompt) { - char* result = NULL; - char line_buf[256]; - int offset = 0; - bool keep_going = true; - fprintf(stdout, "%s", prompt); - fflush(stdout); - while (keep_going) { - if (fgets(line_buf, sizeof(line_buf), stdin) == NULL) { - // fgets got an error. Just give up. - if (result != NULL) { - DeleteArray(result); - } - return NULL; - } - int len = StrLength(line_buf); - if (len > 1 && - line_buf[len - 2] == '\\' && - line_buf[len - 1] == '\n') { - // When we read a line that ends with a "\" we remove the escape and - // append the remainder. - line_buf[len - 2] = '\n'; - line_buf[len - 1] = 0; - len -= 1; - } else if ((len > 0) && (line_buf[len - 1] == '\n')) { - // Since we read a new line we are done reading the line. This - // will exit the loop after copying this buffer into the result. - keep_going = false; - } - if (result == NULL) { - // Allocate the initial result and make room for the terminating '\0' - result = NewArray<char>(len + 1); - } else { - // Allocate a new result with enough room for the new addition. - int new_len = offset + len + 1; - char* new_result = NewArray<char>(new_len); - // Copy the existing input into the new array and set the new - // array as the result. - OS::MemCopy(new_result, result, offset * kCharSize); - DeleteArray(result); - result = new_result; - } - // Copy the newly read line into the result. - OS::MemCopy(result + offset, line_buf, len * kCharSize); - offset += len; - } - ASSERT(result != NULL); - result[offset] = '\0'; - return result; -} - - -char* ReadCharsFromFile(FILE* file, - int* size, - int extra_space, - bool verbose, - const char* filename) { - if (file == NULL || fseek(file, 0, SEEK_END) != 0) { - if (verbose) { - OS::PrintError("Cannot read from file %s.\n", filename); - } - return NULL; - } - - // Get the size of the file and rewind it. 
- *size = ftell(file); - rewind(file); - - char* result = NewArray<char>(*size + extra_space); - for (int i = 0; i < *size && feof(file) == 0;) { - int read = static_cast<int>(fread(&result[i], 1, *size - i, file)); - if (read != (*size - i) && ferror(file) != 0) { - fclose(file); - DeleteArray(result); - return NULL; - } - i += read; - } - return result; -} - - -char* ReadCharsFromFile(const char* filename, - int* size, - int extra_space, - bool verbose) { - FILE* file = OS::FOpen(filename, "rb"); - char* result = ReadCharsFromFile(file, size, extra_space, verbose, filename); - if (file != NULL) fclose(file); - return result; -} - - -byte* ReadBytes(const char* filename, int* size, bool verbose) { - char* chars = ReadCharsFromFile(filename, size, 0, verbose); - return reinterpret_cast<byte*>(chars); -} - - -static Vector<const char> SetVectorContents(char* chars, - int size, - bool* exists) { - if (!chars) { - *exists = false; - return Vector<const char>::empty(); - } - chars[size] = '\0'; - *exists = true; - return Vector<const char>(chars, size); -} - - -Vector<const char> ReadFile(const char* filename, - bool* exists, - bool verbose) { - int size; - char* result = ReadCharsFromFile(filename, &size, 1, verbose); - return SetVectorContents(result, size, exists); -} - - -Vector<const char> ReadFile(FILE* file, - bool* exists, - bool verbose) { - int size; - char* result = ReadCharsFromFile(file, &size, 1, verbose, ""); - return SetVectorContents(result, size, exists); -} - - -int WriteCharsToFile(const char* str, int size, FILE* f) { - int total = 0; - while (total < size) { - int write = static_cast<int>(fwrite(str, 1, size - total, f)); - if (write == 0) { - return total; - } - total += write; - str += write; - } - return total; -} - - -int AppendChars(const char* filename, - const char* str, - int size, - bool verbose) { - FILE* f = OS::FOpen(filename, "ab"); - if (f == NULL) { - if (verbose) { - OS::PrintError("Cannot open file %s for writing.\n", filename); - } - return 0; - } - int written = WriteCharsToFile(str, size, f); - fclose(f); - return written; -} - - -int WriteChars(const char* filename, - const char* str, - int size, - bool verbose) { - FILE* f = OS::FOpen(filename, "wb"); - if (f == NULL) { - if (verbose) { - OS::PrintError("Cannot open file %s for writing.\n", filename); - } - return 0; - } - int written = WriteCharsToFile(str, size, f); - fclose(f); - return written; -} - - -int WriteBytes(const char* filename, - const byte* bytes, - int size, - bool verbose) { - const char* str = reinterpret_cast<const char*>(bytes); - return WriteChars(filename, str, size, verbose); -} - - - -void StringBuilder::AddFormatted(const char* format, ...) { - va_list arguments; - va_start(arguments, format); - AddFormattedList(format, arguments); - va_end(arguments); -} - - -void StringBuilder::AddFormattedList(const char* format, va_list list) { - ASSERT(!is_finalized() && position_ <= buffer_.length()); - int n = OS::VSNPrintF(buffer_ + position_, format, list); - if (n < 0 || n >= (buffer_.length() - position_)) { - position_ = buffer_.length(); - } else { - position_ += n; - } -} - -} } // namespace v8::internal diff --git a/deps/v8/src/v8utils.h b/deps/v8/src/v8utils.h deleted file mode 100644 index 058b153a7..000000000 --- a/deps/v8/src/v8utils.h +++ /dev/null @@ -1,464 +0,0 @@ -// Copyright 2012 the V8 project authors. All rights reserved. 
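[editor's note] The deleted v8utils.cc above carried V8's small file I/O helpers; ReadCharsFromFile follows the usual seek-to-end / ftell / rewind / fread-in-a-loop pattern, retrying short reads until the whole file is buffered. A minimal standalone version of that pattern (plain FILE*/malloc, error handling reduced to returning NULL; names are illustrative, not V8's):

#include <cstdio>
#include <cstdlib>

// Reads a whole file into a heap buffer; *size receives the byte count.
// Returns NULL on any error. Caller frees the buffer.
static char* ReadWholeFile(const char* filename, long* size) {
  FILE* file = std::fopen(filename, "rb");
  if (file == NULL) return NULL;
  if (std::fseek(file, 0, SEEK_END) != 0) { std::fclose(file); return NULL; }
  *size = std::ftell(file);
  if (*size < 0) { std::fclose(file); return NULL; }
  std::rewind(file);

  char* result = static_cast<char*>(std::malloc(*size + 1));
  if (result == NULL) { std::fclose(file); return NULL; }

  long offset = 0;
  while (offset < *size && std::feof(file) == 0) {
    size_t read = std::fread(result + offset, 1, *size - offset, file);
    if (read == 0 && std::ferror(file) != 0) {  // hard error: give up
      std::free(result);
      std::fclose(file);
      return NULL;
    }
    offset += static_cast<long>(read);
  }
  std::fclose(file);
  result[*size] = '\0';  // extra byte so callers can treat it as a C string
  return result;
}

int main(int argc, char** argv) {
  if (argc < 2) return 1;
  long size = 0;
  char* data = ReadWholeFile(argv[1], &size);
  if (data == NULL) return 1;
  std::printf("read %ld bytes\n", size);
  std::free(data);
  return 0;
}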
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef V8_V8UTILS_H_ -#define V8_V8UTILS_H_ - -#include "utils.h" -#include "platform.h" // For va_list on Solaris. - -namespace v8 { -namespace internal { - -// ---------------------------------------------------------------------------- -// I/O support. - -#if __GNUC__ >= 4 -// On gcc we can ask the compiler to check the types of %d-style format -// specifiers and their associated arguments. TODO(erikcorry) fix this -// so it works on MacOSX. -#if defined(__MACH__) && defined(__APPLE__) -#define PRINTF_CHECKING -#define FPRINTF_CHECKING -#else // MacOsX. -#define PRINTF_CHECKING __attribute__ ((format (printf, 1, 2))) -#define FPRINTF_CHECKING __attribute__ ((format (printf, 2, 3))) -#endif -#else -#define PRINTF_CHECKING -#define FPRINTF_CHECKING -#endif - -// Our version of printf(). -void PRINTF_CHECKING PrintF(const char* format, ...); -void FPRINTF_CHECKING PrintF(FILE* out, const char* format, ...); - -// Prepends the current process ID to the output. -void PRINTF_CHECKING PrintPID(const char* format, ...); - -// Our version of fflush. -void Flush(FILE* out); - -inline void Flush() { - Flush(stdout); -} - - -// Read a line of characters after printing the prompt to stdout. The resulting -// char* needs to be disposed off with DeleteArray by the caller. -char* ReadLine(const char* prompt); - - -// Read and return the raw bytes in a file. the size of the buffer is returned -// in size. -// The returned buffer must be freed by the caller. -byte* ReadBytes(const char* filename, int* size, bool verbose = true); - - -// Append size chars from str to the file given by filename. -// The file is overwritten. Returns the number of chars written. -int AppendChars(const char* filename, - const char* str, - int size, - bool verbose = true); - - -// Write size chars from str to the file given by filename. -// The file is overwritten. Returns the number of chars written. 
-int WriteChars(const char* filename, - const char* str, - int size, - bool verbose = true); - - -// Write size bytes to the file given by filename. -// The file is overwritten. Returns the number of bytes written. -int WriteBytes(const char* filename, - const byte* bytes, - int size, - bool verbose = true); - - -// Write the C code -// const char* <varname> = "<str>"; -// const int <varname>_len = <len>; -// to the file given by filename. Only the first len chars are written. -int WriteAsCFile(const char* filename, const char* varname, - const char* str, int size, bool verbose = true); - - -// ---------------------------------------------------------------------------- -// Data structures - -template <typename T> -inline Vector< Handle<Object> > HandleVector(v8::internal::Handle<T>* elms, - int length) { - return Vector< Handle<Object> >( - reinterpret_cast<v8::internal::Handle<Object>*>(elms), length); -} - - -// ---------------------------------------------------------------------------- -// Memory - -// Copies words from |src| to |dst|. The data spans must not overlap. -template <typename T> -inline void CopyWords(T* dst, const T* src, size_t num_words) { - STATIC_ASSERT(sizeof(T) == kPointerSize); - ASSERT(Min(dst, const_cast<T*>(src)) + num_words <= - Max(dst, const_cast<T*>(src))); - ASSERT(num_words > 0); - - // Use block copying OS::MemCopy if the segment we're copying is - // enough to justify the extra call/setup overhead. - static const size_t kBlockCopyLimit = 16; - - if (num_words < kBlockCopyLimit) { - do { - num_words--; - *dst++ = *src++; - } while (num_words > 0); - } else { - OS::MemCopy(dst, src, num_words * kPointerSize); - } -} - - -// Copies words from |src| to |dst|. No restrictions. -template <typename T> -inline void MoveWords(T* dst, const T* src, size_t num_words) { - STATIC_ASSERT(sizeof(T) == kPointerSize); - ASSERT(num_words > 0); - - // Use block copying OS::MemCopy if the segment we're copying is - // enough to justify the extra call/setup overhead. - static const size_t kBlockCopyLimit = 16; - - if (num_words < kBlockCopyLimit && - ((dst < src) || (dst >= (src + num_words * kPointerSize)))) { - T* end = dst + num_words; - do { - num_words--; - *dst++ = *src++; - } while (num_words > 0); - } else { - OS::MemMove(dst, src, num_words * kPointerSize); - } -} - - -// Copies data from |src| to |dst|. The data spans must not overlap. -template <typename T> -inline void CopyBytes(T* dst, const T* src, size_t num_bytes) { - STATIC_ASSERT(sizeof(T) == 1); - ASSERT(Min(dst, const_cast<T*>(src)) + num_bytes <= - Max(dst, const_cast<T*>(src))); - if (num_bytes == 0) return; - - // Use block copying OS::MemCopy if the segment we're copying is - // enough to justify the extra call/setup overhead. - static const int kBlockCopyLimit = OS::kMinComplexMemCopy; - - if (num_bytes < static_cast<size_t>(kBlockCopyLimit)) { - do { - num_bytes--; - *dst++ = *src++; - } while (num_bytes > 0); - } else { - OS::MemCopy(dst, src, num_bytes); - } -} - - -template <typename T, typename U> -inline void MemsetPointer(T** dest, U* value, int counter) { -#ifdef DEBUG - T* a = NULL; - U* b = NULL; - a = b; // Fake assignment to check assignability. - USE(a); -#endif // DEBUG -#if V8_HOST_ARCH_IA32 -#define STOS "stosl" -#elif V8_HOST_ARCH_X64 -#define STOS "stosq" -#endif -#if defined(__native_client__) - // This STOS sequence does not validate for x86_64 Native Client. - // Here we #undef STOS to force use of the slower C version. 
- // TODO(bradchen): Profile V8 and implement a faster REP STOS - // here if the profile indicates it matters. -#undef STOS -#endif - -#if defined(__GNUC__) && defined(STOS) - asm volatile( - "cld;" - "rep ; " STOS - : "+&c" (counter), "+&D" (dest) - : "a" (value) - : "memory", "cc"); -#else - for (int i = 0; i < counter; i++) { - dest[i] = value; - } -#endif - -#undef STOS -} - - -// Simple wrapper that allows an ExternalString to refer to a -// Vector<const char>. Doesn't assume ownership of the data. -class AsciiStringAdapter: public v8::String::ExternalAsciiStringResource { - public: - explicit AsciiStringAdapter(Vector<const char> data) : data_(data) {} - - virtual const char* data() const { return data_.start(); } - - virtual size_t length() const { return data_.length(); } - - private: - Vector<const char> data_; -}; - - -// Simple support to read a file into a 0-terminated C-string. -// The returned buffer must be freed by the caller. -// On return, *exits tells whether the file existed. -Vector<const char> ReadFile(const char* filename, - bool* exists, - bool verbose = true); -Vector<const char> ReadFile(FILE* file, - bool* exists, - bool verbose = true); - - -template <typename sourcechar, typename sinkchar> -INLINE(static void CopyCharsUnsigned(sinkchar* dest, - const sourcechar* src, - int chars)); -#if defined(V8_HOST_ARCH_ARM) -INLINE(void CopyCharsUnsigned(uint8_t* dest, const uint8_t* src, int chars)); -INLINE(void CopyCharsUnsigned(uint16_t* dest, const uint8_t* src, int chars)); -INLINE(void CopyCharsUnsigned(uint16_t* dest, const uint16_t* src, int chars)); -#elif defined(V8_HOST_ARCH_MIPS) -INLINE(void CopyCharsUnsigned(uint8_t* dest, const uint8_t* src, int chars)); -INLINE(void CopyCharsUnsigned(uint16_t* dest, const uint16_t* src, int chars)); -#endif - -// Copy from ASCII/16bit chars to ASCII/16bit chars. -template <typename sourcechar, typename sinkchar> -INLINE(void CopyChars(sinkchar* dest, const sourcechar* src, int chars)); - -template<typename sourcechar, typename sinkchar> -void CopyChars(sinkchar* dest, const sourcechar* src, int chars) { - ASSERT(sizeof(sourcechar) <= 2); - ASSERT(sizeof(sinkchar) <= 2); - if (sizeof(sinkchar) == 1) { - if (sizeof(sourcechar) == 1) { - CopyCharsUnsigned(reinterpret_cast<uint8_t*>(dest), - reinterpret_cast<const uint8_t*>(src), - chars); - } else { - CopyCharsUnsigned(reinterpret_cast<uint8_t*>(dest), - reinterpret_cast<const uint16_t*>(src), - chars); - } - } else { - if (sizeof(sourcechar) == 1) { - CopyCharsUnsigned(reinterpret_cast<uint16_t*>(dest), - reinterpret_cast<const uint8_t*>(src), - chars); - } else { - CopyCharsUnsigned(reinterpret_cast<uint16_t*>(dest), - reinterpret_cast<const uint16_t*>(src), - chars); - } - } -} - -template <typename sourcechar, typename sinkchar> -void CopyCharsUnsigned(sinkchar* dest, const sourcechar* src, int chars) { - sinkchar* limit = dest + chars; -#ifdef V8_HOST_CAN_READ_UNALIGNED - if (sizeof(*dest) == sizeof(*src)) { - if (chars >= static_cast<int>(OS::kMinComplexMemCopy / sizeof(*dest))) { - OS::MemCopy(dest, src, chars * sizeof(*dest)); - return; - } - // Number of characters in a uintptr_t. - static const int kStepSize = sizeof(uintptr_t) / sizeof(*dest); // NOLINT - ASSERT(dest + kStepSize > dest); // Check for overflow. 
- while (dest + kStepSize <= limit) { - *reinterpret_cast<uintptr_t*>(dest) = - *reinterpret_cast<const uintptr_t*>(src); - dest += kStepSize; - src += kStepSize; - } - } -#endif - while (dest < limit) { - *dest++ = static_cast<sinkchar>(*src++); - } -} - - -#if defined(V8_HOST_ARCH_ARM) -void CopyCharsUnsigned(uint8_t* dest, const uint8_t* src, int chars) { - switch (static_cast<unsigned>(chars)) { - case 0: - break; - case 1: - *dest = *src; - break; - case 2: - memcpy(dest, src, 2); - break; - case 3: - memcpy(dest, src, 3); - break; - case 4: - memcpy(dest, src, 4); - break; - case 5: - memcpy(dest, src, 5); - break; - case 6: - memcpy(dest, src, 6); - break; - case 7: - memcpy(dest, src, 7); - break; - case 8: - memcpy(dest, src, 8); - break; - case 9: - memcpy(dest, src, 9); - break; - case 10: - memcpy(dest, src, 10); - break; - case 11: - memcpy(dest, src, 11); - break; - case 12: - memcpy(dest, src, 12); - break; - case 13: - memcpy(dest, src, 13); - break; - case 14: - memcpy(dest, src, 14); - break; - case 15: - memcpy(dest, src, 15); - break; - default: - OS::MemCopy(dest, src, chars); - break; - } -} - - -void CopyCharsUnsigned(uint16_t* dest, const uint8_t* src, int chars) { - if (chars >= OS::kMinComplexConvertMemCopy) { - OS::MemCopyUint16Uint8(dest, src, chars); - } else { - OS::MemCopyUint16Uint8Wrapper(dest, src, chars); - } -} - - -void CopyCharsUnsigned(uint16_t* dest, const uint16_t* src, int chars) { - switch (static_cast<unsigned>(chars)) { - case 0: - break; - case 1: - *dest = *src; - break; - case 2: - memcpy(dest, src, 4); - break; - case 3: - memcpy(dest, src, 6); - break; - case 4: - memcpy(dest, src, 8); - break; - case 5: - memcpy(dest, src, 10); - break; - case 6: - memcpy(dest, src, 12); - break; - case 7: - memcpy(dest, src, 14); - break; - default: - OS::MemCopy(dest, src, chars * sizeof(*dest)); - break; - } -} - - -#elif defined(V8_HOST_ARCH_MIPS) -void CopyCharsUnsigned(uint8_t* dest, const uint8_t* src, int chars) { - if (chars < OS::kMinComplexMemCopy) { - memcpy(dest, src, chars); - } else { - OS::MemCopy(dest, src, chars); - } -} - -void CopyCharsUnsigned(uint16_t* dest, const uint16_t* src, int chars) { - if (chars < OS::kMinComplexMemCopy) { - memcpy(dest, src, chars * sizeof(*dest)); - } else { - OS::MemCopy(dest, src, chars * sizeof(*dest)); - } -} -#endif - - -class StringBuilder : public SimpleStringBuilder { - public: - explicit StringBuilder(int size) : SimpleStringBuilder(size) { } - StringBuilder(char* buffer, int size) : SimpleStringBuilder(buffer, size) { } - - // Add formatted contents to the builder just like printf(). - void AddFormatted(const char* format, ...); - - // Add formatted contents like printf based on a va_list. - void AddFormattedList(const char* format, va_list list); - private: - DISALLOW_IMPLICIT_CONSTRUCTORS(StringBuilder); -}; - -} } // namespace v8::internal - -#endif // V8_V8UTILS_H_ diff --git a/deps/v8/src/variables.cc b/deps/v8/src/variables.cc index 6c4ea527c..3b90e0eea 100644 --- a/deps/v8/src/variables.cc +++ b/deps/v8/src/variables.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -58,7 +35,7 @@ const char* Variable::Mode2String(VariableMode mode) { Variable::Variable(Scope* scope, Handle<String> name, VariableMode mode, - bool is_valid_LHS, + bool is_valid_ref, Kind kind, InitializationFlag initialization_flag, Interface* interface) @@ -70,7 +47,7 @@ Variable::Variable(Scope* scope, index_(-1), initializer_position_(RelocInfo::kNoPosition), local_if_not_shadowed_(NULL), - is_valid_LHS_(is_valid_LHS), + is_valid_ref_(is_valid_ref), force_context_allocation_(false), is_used_(false), initialization_flag_(initialization_flag), diff --git a/deps/v8/src/variables.h b/deps/v8/src/variables.h index 401d04446..3d8e130d3 100644 --- a/deps/v8/src/variables.h +++ b/deps/v8/src/variables.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_VARIABLES_H_ #define V8_VARIABLES_H_ @@ -77,7 +54,7 @@ class Variable: public ZoneObject { Variable(Scope* scope, Handle<String> name, VariableMode mode, - bool is_valid_lhs, + bool is_valid_ref, Kind kind, InitializationFlag initialization_flag, Interface* interface = Interface::NewValue()); @@ -85,7 +62,7 @@ class Variable: public ZoneObject { // Printing support static const char* Mode2String(VariableMode mode); - bool IsValidLeftHandSide() { return is_valid_LHS_; } + bool IsValidReference() { return is_valid_ref_; } // The source code for an eval() call may refer to a variable that is // in an outer scope about which we don't know anything (it may not @@ -172,8 +149,8 @@ class Variable: public ZoneObject { // binding scope (exclusive). Variable* local_if_not_shadowed_; - // Valid as a LHS? (const and this are not valid LHS, for example) - bool is_valid_LHS_; + // Valid as a reference? (const and this are not valid, for example) + bool is_valid_ref_; // Usage info. bool force_context_allocation_; // set by variable resolver diff --git a/deps/v8/src/vector.h b/deps/v8/src/vector.h new file mode 100644 index 000000000..9e8c200ed --- /dev/null +++ b/deps/v8/src/vector.h @@ -0,0 +1,171 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_VECTOR_H_ +#define V8_VECTOR_H_ + +#include <string.h> +#include <algorithm> + +#include "allocation.h" +#include "checks.h" +#include "globals.h" + +namespace v8 { +namespace internal { + + +template <typename T> +class Vector { + public: + Vector() : start_(NULL), length_(0) {} + Vector(T* data, int length) : start_(data), length_(length) { + ASSERT(length == 0 || (length > 0 && data != NULL)); + } + + static Vector<T> New(int length) { + return Vector<T>(NewArray<T>(length), length); + } + + // Returns a vector using the same backing storage as this one, + // spanning from and including 'from', to but not including 'to'. + Vector<T> SubVector(int from, int to) { + SLOW_ASSERT(to <= length_); + SLOW_ASSERT(from < to); + ASSERT(0 <= from); + return Vector<T>(start() + from, to - from); + } + + // Returns the length of the vector. + int length() const { return length_; } + + // Returns whether or not the vector is empty. + bool is_empty() const { return length_ == 0; } + + // Returns the pointer to the start of the data in the vector. + T* start() const { return start_; } + + // Access individual vector elements - checks bounds in debug mode. + T& operator[](int index) const { + ASSERT(0 <= index && index < length_); + return start_[index]; + } + + const T& at(int index) const { return operator[](index); } + + T& first() { return start_[0]; } + + T& last() { return start_[length_ - 1]; } + + // Returns a clone of this vector with a new backing store. 
+ Vector<T> Clone() const { + T* result = NewArray<T>(length_); + for (int i = 0; i < length_; i++) result[i] = start_[i]; + return Vector<T>(result, length_); + } + + void Sort(int (*cmp)(const T*, const T*)) { + std::sort(start(), start() + length(), RawComparer(cmp)); + } + + void Sort() { + std::sort(start(), start() + length()); + } + + void Truncate(int length) { + ASSERT(length <= length_); + length_ = length; + } + + // Releases the array underlying this vector. Once disposed the + // vector is empty. + void Dispose() { + DeleteArray(start_); + start_ = NULL; + length_ = 0; + } + + inline Vector<T> operator+(int offset) { + ASSERT(offset < length_); + return Vector<T>(start_ + offset, length_ - offset); + } + + // Factory method for creating empty vectors. + static Vector<T> empty() { return Vector<T>(NULL, 0); } + + template<typename S> + static Vector<T> cast(Vector<S> input) { + return Vector<T>(reinterpret_cast<T*>(input.start()), + input.length() * sizeof(S) / sizeof(T)); + } + + protected: + void set_start(T* start) { start_ = start; } + + private: + T* start_; + int length_; + + class RawComparer { + public: + explicit RawComparer(int (*cmp)(const T*, const T*)) : cmp_(cmp) {} + bool operator()(const T& a, const T& b) { + return cmp_(&a, &b) < 0; + } + + private: + int (*cmp_)(const T*, const T*); + }; +}; + + +template <typename T> +class ScopedVector : public Vector<T> { + public: + explicit ScopedVector(int length) : Vector<T>(NewArray<T>(length), length) { } + ~ScopedVector() { + DeleteArray(this->start()); + } + + private: + DISALLOW_IMPLICIT_CONSTRUCTORS(ScopedVector); +}; + + +inline int StrLength(const char* string) { + size_t length = strlen(string); + ASSERT(length == static_cast<size_t>(static_cast<int>(length))); + return static_cast<int>(length); +} + + +#define STATIC_ASCII_VECTOR(x) \ + v8::internal::Vector<const uint8_t>(reinterpret_cast<const uint8_t*>(x), \ + ARRAY_SIZE(x)-1) + +inline Vector<const char> CStrVector(const char* data) { + return Vector<const char>(data, StrLength(data)); +} + +inline Vector<const uint8_t> OneByteVector(const char* data, int length) { + return Vector<const uint8_t>(reinterpret_cast<const uint8_t*>(data), length); +} + +inline Vector<const uint8_t> OneByteVector(const char* data) { + return OneByteVector(data, StrLength(data)); +} + +inline Vector<char> MutableCStrVector(char* data) { + return Vector<char>(data, StrLength(data)); +} + +inline Vector<char> MutableCStrVector(char* data, int max) { + int length = StrLength(data); + return Vector<char>(data, (length < max) ? length : max); +} + + +} } // namespace v8::internal + +#endif // V8_VECTOR_H_ diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc index 904b067a6..33755eb1f 100644 --- a/deps/v8/src/version.cc +++ b/deps/v8/src/version.cc @@ -33,8 +33,8 @@ // NOTE these macros are used by some of the tool scripts and the build // system so their names cannot be changed without changing the scripts. #define MAJOR_VERSION 3 -#define MINOR_VERSION 25 -#define BUILD_NUMBER 30 +#define MINOR_VERSION 26 +#define BUILD_NUMBER 33 #define PATCH_LEVEL 0 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.) diff --git a/deps/v8/src/version.h b/deps/v8/src/version.h index 4b3e7e2bd..b0a607152 100644 --- a/deps/v8/src/version.h +++ b/deps/v8/src/version.h @@ -1,29 +1,6 @@ // Copyright 2009 the V8 project authors. All rights reserved. 
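[editor's note] The new vector.h above gives V8 a standalone Vector<T>/ScopedVector<T> pair (a non-owning span plus an owning, stack-scoped variant). As a usage illustration only, the sketch below mirrors a small subset of that API against a trimmed stand-in rather than the real header, since the real one depends on V8's allocation.h and checks.h; new[]/delete[] and assert replace NewArray/DeleteArray and ASSERT.

#include <cassert>
#include <cstdio>

// Trimmed stand-in for v8::internal::Vector<T>; API subset only.
template <typename T>
class Vector {
 public:
  Vector() : start_(nullptr), length_(0) {}
  Vector(T* data, int length) : start_(data), length_(length) {}

  int length() const { return length_; }
  T* start() const { return start_; }
  T& operator[](int index) const {
    assert(0 <= index && index < length_);
    return start_[index];
  }
  // Sub-span over the same backing storage: [from, to).
  Vector<T> SubVector(int from, int to) {
    assert(0 <= from && from < to && to <= length_);
    return Vector<T>(start_ + from, to - from);
  }
  void Dispose() {  // releases the backing array
    delete[] start_;
    start_ = nullptr;
    length_ = 0;
  }

 private:
  T* start_;
  int length_;
};

// Owning variant that frees its array when it goes out of scope.
template <typename T>
class ScopedVector : public Vector<T> {
 public:
  explicit ScopedVector(int length) : Vector<T>(new T[length], length) {}
  ~ScopedVector() { delete[] this->start(); }
};

int main() {
  ScopedVector<char> buffer(64);  // stack-scoped scratch buffer
  std::snprintf(buffer.start(), buffer.length(), "v8 roll to %d.%d.%d", 3, 26, 33);

  Vector<char> prefix = buffer.SubVector(0, 7);  // cheap view, no copy
  std::printf("%.*s\n", prefix.length(), prefix.start());
  return 0;  // buffer freed by ~ScopedVector
}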
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_VERSION_H_ #define V8_VERSION_H_ diff --git a/deps/v8/src/vm-state-inl.h b/deps/v8/src/vm-state-inl.h index 5bee438b6..f26c48bd7 100644 --- a/deps/v8/src/vm-state-inl.h +++ b/deps/v8/src/vm-state-inl.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_VM_STATE_INL_H_ #define V8_VM_STATE_INL_H_ diff --git a/deps/v8/src/vm-state.h b/deps/v8/src/vm-state.h index f592bb92c..9b3bed69d 100644 --- a/deps/v8/src/vm-state.h +++ b/deps/v8/src/vm-state.h @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_VM_STATE_H_ #define V8_VM_STATE_H_ diff --git a/deps/v8/src/weak_collection.js b/deps/v8/src/weak_collection.js index 81d4ab536..4c26d2574 100644 --- a/deps/v8/src/weak_collection.js +++ b/deps/v8/src/weak_collection.js @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. "use strict"; diff --git a/deps/v8/src/win32-headers.h b/deps/v8/src/win32-headers.h index ba595b97d..5ede3b313 100644 --- a/deps/v8/src/win32-headers.h +++ b/deps/v8/src/win32-headers.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_WIN32_HEADERS_H_ #define V8_WIN32_HEADERS_H_ diff --git a/deps/v8/src/win32-math.cc b/deps/v8/src/win32-math.cc index 8f6d07743..fb42383de 100644 --- a/deps/v8/src/win32-math.cc +++ b/deps/v8/src/win32-math.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Extra POSIX/ANSI routines for Win32 when using Visual Studio C++. Please // refer to The Open Group Base Specification for specification of the correct diff --git a/deps/v8/src/win32-math.h b/deps/v8/src/win32-math.h index fd9312b0f..7b7cbc925 100644 --- a/deps/v8/src/win32-math.h +++ b/deps/v8/src/win32-math.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // Extra POSIX/ANSI routines for Win32 when using Visual Studio C++. Please // refer to The Open Group Base Specification for specification of the correct diff --git a/deps/v8/src/x64/assembler-x64-inl.h b/deps/v8/src/x64/assembler-x64-inl.h index a559b6275..be9902289 100644 --- a/deps/v8/src/x64/assembler-x64-inl.h +++ b/deps/v8/src/x64/assembler-x64-inl.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_X64_ASSEMBLER_X64_INL_H_ #define V8_X64_ASSEMBLER_X64_INL_H_ @@ -43,7 +20,8 @@ namespace internal { static const byte kCallOpcode = 0xE8; -static const int kNoCodeAgeSequenceLength = 6; +// The length of pushq(rbp), movp(rbp, rsp), Push(rsi) and Push(rdi). +static const int kNoCodeAgeSequenceLength = kPointerSize == kInt64Size ? 6 : 17; void Assembler::emitl(uint32_t x) { @@ -392,12 +370,8 @@ bool RelocInfo::IsPatchedReturnSequence() { // movq(rsp, rbp); pop(rbp); ret(n); int3 *6 // The 11th byte is int3 (0xCC) in the return sequence and // REX.WB (0x48+register bit) for the call sequence. 
-#ifdef ENABLE_DEBUGGER_SUPPORT return pc_[Assembler::kMoveAddressIntoScratchRegisterInstructionLength] != 0xCC; -#else - return false; -#endif } @@ -483,14 +457,12 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) { CPU::FlushICache(pc_, sizeof(Address)); } else if (RelocInfo::IsCodeAgeSequence(mode)) { visitor->VisitCodeAgeSequence(this); -#ifdef ENABLE_DEBUGGER_SUPPORT } else if (((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) || (RelocInfo::IsDebugBreakSlot(mode) && IsPatchedDebugBreakSlotSequence())) && isolate->debug()->has_break_points()) { visitor->VisitDebugTarget(this); -#endif } else if (RelocInfo::IsRuntimeEntry(mode)) { visitor->VisitRuntimeEntry(this); } @@ -512,14 +484,12 @@ void RelocInfo::Visit(Heap* heap) { CPU::FlushICache(pc_, sizeof(Address)); } else if (RelocInfo::IsCodeAgeSequence(mode)) { StaticVisitor::VisitCodeAgeSequence(heap, this); -#ifdef ENABLE_DEBUGGER_SUPPORT } else if (heap->isolate()->debug()->has_break_points() && ((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) || (RelocInfo::IsDebugBreakSlot(mode) && IsPatchedDebugBreakSlotSequence()))) { StaticVisitor::VisitDebugTarget(heap, this); -#endif } else if (RelocInfo::IsRuntimeEntry(mode)) { StaticVisitor::VisitRuntimeEntry(this); } diff --git a/deps/v8/src/x64/assembler-x64.cc b/deps/v8/src/x64/assembler-x64.cc index 60383da01..306a54d82 100644 --- a/deps/v8/src/x64/assembler-x64.cc +++ b/deps/v8/src/x64/assembler-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
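The kNoCodeAgeSequenceLength comment above pins the young-code prologue to pushq(rbp); movp(rbp, rsp); Push(rsi); Push(rdi). As a quick sanity check of the 6-byte figure for builds where kPointerSize == kInt64Size (the 17-byte x32 figure is taken on trust here), the standard x86-64 encodings add up as follows; this is an illustration, not part of the commit:

  // Illustration only, not V8 source: byte counts of the code-age prologue
  // instructions on a 64-bit-pointer build.
  constexpr int kPushRbpBytes   = 1;  // push rbp             -> 0x55
  constexpr int kMovRbpRspBytes = 3;  // mov rbp, rsp (REX.W) -> 0x48 0x89 0xE5
  constexpr int kPushRsiBytes   = 1;  // push rsi             -> 0x56
  constexpr int kPushRdiBytes   = 1;  // push rdi             -> 0x57
  static_assert(kPushRbpBytes + kMovRbpRspBytes + kPushRsiBytes + kPushRdiBytes == 6,
                "matches kNoCodeAgeSequenceLength when kPointerSize == kInt64Size");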
#include "v8.h" @@ -52,13 +29,13 @@ ExternalReference ExternalReference::cpu_features() { } -void CpuFeatures::Probe() { +void CpuFeatures::Probe(bool serializer_enabled) { ASSERT(supported_ == CpuFeatures::kDefaultCpuFeatures); #ifdef DEBUG initialized_ = true; #endif supported_ = kDefaultCpuFeatures; - if (Serializer::enabled()) { + if (serializer_enabled) { supported_ |= OS::CpuFeaturesImpliedByPlatform(); return; // No features if we might serialize. } @@ -467,24 +444,30 @@ void Assembler::emit_operand(int code, const Operand& adr) { // Assembler Instruction implementations. -void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) { +void Assembler::arithmetic_op(byte opcode, + Register reg, + const Operand& op, + int size) { EnsureSpace ensure_space(this); - emit_rex_64(reg, op); + emit_rex(reg, op, size); emit(opcode); emit_operand(reg, op); } -void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) { +void Assembler::arithmetic_op(byte opcode, + Register reg, + Register rm_reg, + int size) { EnsureSpace ensure_space(this); ASSERT((opcode & 0xC6) == 2); if (rm_reg.low_bits() == 4) { // Forces SIB byte. // Swap reg and rm_reg and change opcode operand order. - emit_rex_64(rm_reg, reg); + emit_rex(rm_reg, reg, size); emit(opcode ^ 0x02); emit_modrm(rm_reg, reg); } else { - emit_rex_64(reg, rm_reg); + emit_rex(reg, rm_reg, size); emit(opcode); emit_modrm(reg, rm_reg); } @@ -520,37 +503,45 @@ void Assembler::arithmetic_op_16(byte opcode, } -void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) { +void Assembler::arithmetic_op_8(byte opcode, Register reg, const Operand& op) { + EnsureSpace ensure_space(this); + if (!reg.is_byte_register()) { + // Register is not one of al, bl, cl, dl. Its encoding needs REX. + emit_rex_32(reg); + } + emit(opcode); + emit_operand(reg, op); +} + + +void Assembler::arithmetic_op_8(byte opcode, Register reg, Register rm_reg) { EnsureSpace ensure_space(this); ASSERT((opcode & 0xC6) == 2); - if (rm_reg.low_bits() == 4) { // Forces SIB byte. + if (rm_reg.low_bits() == 4) { // Forces SIB byte. // Swap reg and rm_reg and change opcode operand order. - emit_optional_rex_32(rm_reg, reg); - emit(opcode ^ 0x02); // E.g. 0x03 -> 0x01 for ADD. + if (!rm_reg.is_byte_register() || !reg.is_byte_register()) { + // Register is not one of al, bl, cl, dl. Its encoding needs REX. + emit_rex_32(rm_reg, reg); + } + emit(opcode ^ 0x02); emit_modrm(rm_reg, reg); } else { - emit_optional_rex_32(reg, rm_reg); + if (!reg.is_byte_register() || !rm_reg.is_byte_register()) { + // Register is not one of al, bl, cl, dl. Its encoding needs REX. 
+ emit_rex_32(reg, rm_reg); + } emit(opcode); emit_modrm(reg, rm_reg); } } -void Assembler::arithmetic_op_32(byte opcode, - Register reg, - const Operand& rm_reg) { - EnsureSpace ensure_space(this); - emit_optional_rex_32(reg, rm_reg); - emit(opcode); - emit_operand(reg, rm_reg); -} - - void Assembler::immediate_arithmetic_op(byte subcode, Register dst, - Immediate src) { + Immediate src, + int size) { EnsureSpace ensure_space(this); - emit_rex_64(dst); + emit_rex(dst, size); if (is_int8(src.value_)) { emit(0x83); emit_modrm(subcode, dst); @@ -567,9 +558,10 @@ void Assembler::immediate_arithmetic_op(byte subcode, void Assembler::immediate_arithmetic_op(byte subcode, const Operand& dst, - Immediate src) { + Immediate src, + int size) { EnsureSpace ensure_space(this); - emit_rex_64(dst); + emit_rex(dst, size); if (is_int8(src.value_)) { emit(0x83); emit_operand(subcode, dst); @@ -621,43 +613,6 @@ void Assembler::immediate_arithmetic_op_16(byte subcode, } -void Assembler::immediate_arithmetic_op_32(byte subcode, - Register dst, - Immediate src) { - EnsureSpace ensure_space(this); - emit_optional_rex_32(dst); - if (is_int8(src.value_)) { - emit(0x83); - emit_modrm(subcode, dst); - emit(src.value_); - } else if (dst.is(rax)) { - emit(0x05 | (subcode << 3)); - emitl(src.value_); - } else { - emit(0x81); - emit_modrm(subcode, dst); - emitl(src.value_); - } -} - - -void Assembler::immediate_arithmetic_op_32(byte subcode, - const Operand& dst, - Immediate src) { - EnsureSpace ensure_space(this); - emit_optional_rex_32(dst); - if (is_int8(src.value_)) { - emit(0x83); - emit_operand(subcode, dst); - emit(src.value_); - } else { - emit(0x81); - emit_operand(subcode, dst); - emitl(src.value_); - } -} - - void Assembler::immediate_arithmetic_op_8(byte subcode, const Operand& dst, Immediate src) { @@ -675,8 +630,8 @@ void Assembler::immediate_arithmetic_op_8(byte subcode, Immediate src) { EnsureSpace ensure_space(this); if (!dst.is_byte_register()) { - // Use 64-bit mode byte registers. - emit_rex_64(dst); + // Register is not one of al, bl, cl, dl. Its encoding needs REX. + emit_rex_32(dst); } ASSERT(is_int8(src.value_) || is_uint8(src.value_)); emit(0x80); @@ -685,15 +640,19 @@ void Assembler::immediate_arithmetic_op_8(byte subcode, } -void Assembler::shift(Register dst, Immediate shift_amount, int subcode) { +void Assembler::shift(Register dst, + Immediate shift_amount, + int subcode, + int size) { EnsureSpace ensure_space(this); - ASSERT(is_uint6(shift_amount.value_)); // illegal shift count + ASSERT(size == kInt64Size ? 
is_uint6(shift_amount.value_) + : is_uint5(shift_amount.value_)); if (shift_amount.value_ == 1) { - emit_rex_64(dst); + emit_rex(dst, size); emit(0xD1); emit_modrm(subcode, dst); } else { - emit_rex_64(dst); + emit_rex(dst, size); emit(0xC1); emit_modrm(subcode, dst); emit(shift_amount.value_); @@ -701,38 +660,14 @@ void Assembler::shift(Register dst, Immediate shift_amount, int subcode) { } -void Assembler::shift(Register dst, int subcode) { - EnsureSpace ensure_space(this); - emit_rex_64(dst); - emit(0xD3); - emit_modrm(subcode, dst); -} - - -void Assembler::shift_32(Register dst, int subcode) { +void Assembler::shift(Register dst, int subcode, int size) { EnsureSpace ensure_space(this); - emit_optional_rex_32(dst); + emit_rex(dst, size); emit(0xD3); emit_modrm(subcode, dst); } -void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) { - EnsureSpace ensure_space(this); - ASSERT(is_uint5(shift_amount.value_)); // illegal shift count - if (shift_amount.value_ == 1) { - emit_optional_rex_32(dst); - emit(0xD1); - emit_modrm(subcode, dst); - } else { - emit_optional_rex_32(dst); - emit(0xC1); - emit_modrm(subcode, dst); - emit(shift_amount.value_); - } -} - - void Assembler::bt(const Operand& dst, Register src) { EnsureSpace ensure_space(this); emit_rex_64(src, dst); @@ -1431,6 +1366,15 @@ void Assembler::movl(const Operand& dst, Label* src) { } +void Assembler::movsxbl(Register dst, const Operand& src) { + EnsureSpace ensure_space(this); + emit_optional_rex_32(dst, src); + emit(0x0F); + emit(0xBE); + emit_operand(dst, src); +} + + void Assembler::movsxbq(Register dst, const Operand& src) { EnsureSpace ensure_space(this); emit_rex_64(dst, src); @@ -1440,6 +1384,15 @@ void Assembler::movsxbq(Register dst, const Operand& src) { } +void Assembler::movsxwl(Register dst, const Operand& src) { + EnsureSpace ensure_space(this); + emit_optional_rex_32(dst, src); + emit(0x0F); + emit(0xBF); + emit_operand(dst, src); +} + + void Assembler::movsxwq(Register dst, const Operand& src) { EnsureSpace ensure_space(this); emit_rex_64(dst, src); @@ -2977,12 +2930,7 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { ASSERT(!RelocInfo::IsNone(rmode)); if (rmode == RelocInfo::EXTERNAL_REFERENCE) { // Don't record external references unless the heap will be serialized. -#ifdef DEBUG - if (!Serializer::enabled()) { - Serializer::TooLateToEnableNow(); - } -#endif - if (!Serializer::enabled() && !emit_debug_code()) { + if (!Serializer::enabled(isolate()) && !emit_debug_code()) { return; } } else if (rmode == RelocInfo::CODE_AGE_SEQUENCE) { @@ -3016,16 +2964,17 @@ void Assembler::RecordComment(const char* msg, bool force) { } -MaybeObject* Assembler::AllocateConstantPool(Heap* heap) { +Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { // No out-of-line constant pool support. - UNREACHABLE(); - return NULL; + ASSERT(!FLAG_enable_ool_constant_pool); + return isolate->factory()->empty_constant_pool_array(); } void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { // No out-of-line constant pool support. 
- UNREACHABLE(); + ASSERT(!FLAG_enable_ool_constant_pool); + return; } diff --git a/deps/v8/src/x64/assembler-x64.h b/deps/v8/src/x64/assembler-x64.h index d47ca32e0..685d46c09 100644 --- a/deps/v8/src/x64/assembler-x64.h +++ b/deps/v8/src/x64/assembler-x64.h @@ -357,6 +357,10 @@ inline Condition ReverseCondition(Condition cc) { class Immediate BASE_EMBEDDED { public: explicit Immediate(int32_t value) : value_(value) {} + explicit Immediate(Smi* value) { + ASSERT(SmiValuesAre31Bits()); // Only available for 31-bit SMI. + value_ = static_cast<int32_t>(reinterpret_cast<intptr_t>(value)); + } private: int32_t value_; @@ -446,7 +450,7 @@ class CpuFeatures : public AllStatic { public: // Detect features of the target CPU. Set safe defaults if the serializer // is enabled (snapshots must be portable). - static void Probe(); + static void Probe(bool serializer_enabled); // Check whether a feature is supported by the target CPU. static bool IsSupported(CpuFeature f) { @@ -459,15 +463,11 @@ class CpuFeatures : public AllStatic { return Check(f, supported_); } - static bool IsFoundByRuntimeProbingOnly(CpuFeature f) { - ASSERT(initialized_); - return Check(f, found_by_runtime_probing_only_); - } - - static bool IsSafeForSnapshot(CpuFeature f) { + static bool IsSafeForSnapshot(Isolate* isolate, CpuFeature f) { return Check(f, cross_compile_) || (IsSupported(f) && - (!Serializer::enabled() || !IsFoundByRuntimeProbingOnly(f))); + !(Serializer::enabled(isolate) && + Check(f, found_by_runtime_probing_only_))); } static bool VerifyCrossCompiling() { @@ -480,6 +480,8 @@ class CpuFeatures : public AllStatic { (cross_compile_ & mask) == mask; } + static bool SupportsCrankshaft() { return true; } + private: static bool Check(CpuFeature f, uint64_t set) { return (set & flag2set(f)) != 0; @@ -532,6 +534,18 @@ class CpuFeatures : public AllStatic { V(xor) +// Shift instructions on operands/registers with kPointerSize, kInt32Size and +// kInt64Size. +#define SHIFT_INSTRUCTION_LIST(V) \ + V(rol, 0x0) \ + V(ror, 0x1) \ + V(rcl, 0x2) \ + V(rcr, 0x3) \ + V(shl, 0x4) \ + V(shr, 0x5) \ + V(sar, 0x7) \ + + class Assembler : public AssemblerBase { private: // We check before assembling an instruction that there is sufficient @@ -680,6 +694,8 @@ class Assembler : public AssemblerBase { // - Instructions on 64-bit (quadword) operands/registers use 'q'. // - Instructions on operands/registers with pointer size use 'p'. 
+ STATIC_ASSERT(kPointerSize == kInt64Size || kPointerSize == kInt32Size); + #define DECLARE_INSTRUCTION(instruction) \ template<class P1> \ void instruction##p(P1 p1) { \ @@ -776,7 +792,9 @@ class Assembler : public AssemblerBase { void movq(Register dst, int64_t value); void movq(Register dst, uint64_t value); + void movsxbl(Register dst, const Operand& src); void movsxbq(Register dst, const Operand& src); + void movsxwl(Register dst, const Operand& src); void movsxwq(Register dst, const Operand& src); void movsxlq(Register dst, Register src); void movsxlq(Register dst, const Operand& src); @@ -806,15 +824,15 @@ class Assembler : public AssemblerBase { void cmpb_al(Immediate src); void cmpb(Register dst, Register src) { - arithmetic_op(0x3A, dst, src); + arithmetic_op_8(0x3A, dst, src); } void cmpb(Register dst, const Operand& src) { - arithmetic_op(0x3A, dst, src); + arithmetic_op_8(0x3A, dst, src); } void cmpb(const Operand& dst, Register src) { - arithmetic_op(0x38, src, dst); + arithmetic_op_8(0x38, src, dst); } void cmpb(const Operand& dst, Immediate src) { @@ -856,33 +874,32 @@ class Assembler : public AssemblerBase { // Multiply rax by src, put the result in rdx:rax. void mul(Register src); - void rcl(Register dst, Immediate imm8) { - shift(dst, imm8, 0x2); - } - - void rol(Register dst, Immediate imm8) { - shift(dst, imm8, 0x0); - } - - void roll(Register dst, Immediate imm8) { - shift_32(dst, imm8, 0x0); - } - - void rcr(Register dst, Immediate imm8) { - shift(dst, imm8, 0x3); - } - - void ror(Register dst, Immediate imm8) { - shift(dst, imm8, 0x1); - } - - void rorl(Register dst, Immediate imm8) { - shift_32(dst, imm8, 0x1); - } - - void rorl_cl(Register dst) { - shift_32(dst, 0x1); - } +#define DECLARE_SHIFT_INSTRUCTION(instruction, subcode) \ + void instruction##p(Register dst, Immediate imm8) { \ + shift(dst, imm8, subcode, kPointerSize); \ + } \ + \ + void instruction##l(Register dst, Immediate imm8) { \ + shift(dst, imm8, subcode, kInt32Size); \ + } \ + \ + void instruction##q(Register dst, Immediate imm8) { \ + shift(dst, imm8, subcode, kInt64Size); \ + } \ + \ + void instruction##p_cl(Register dst) { \ + shift(dst, subcode, kPointerSize); \ + } \ + \ + void instruction##l_cl(Register dst) { \ + shift(dst, subcode, kInt32Size); \ + } \ + \ + void instruction##q_cl(Register dst) { \ + shift(dst, subcode, kInt64Size); \ + } + SHIFT_INSTRUCTION_LIST(DECLARE_SHIFT_INSTRUCTION) +#undef DECLARE_SHIFT_INSTRUCTION // Shifts dst:src left by cl bits, affecting only dst. void shld(Register dst, Register src); @@ -890,60 +907,6 @@ class Assembler : public AssemblerBase { // Shifts src:dst right by cl bits, affecting only dst. void shrd(Register dst, Register src); - // Shifts dst right, duplicating sign bit, by shift_amount bits. - // Shifting by 1 is handled efficiently. - void sar(Register dst, Immediate shift_amount) { - shift(dst, shift_amount, 0x7); - } - - // Shifts dst right, duplicating sign bit, by shift_amount bits. - // Shifting by 1 is handled efficiently. - void sarl(Register dst, Immediate shift_amount) { - shift_32(dst, shift_amount, 0x7); - } - - // Shifts dst right, duplicating sign bit, by cl % 64 bits. - void sar_cl(Register dst) { - shift(dst, 0x7); - } - - // Shifts dst right, duplicating sign bit, by cl % 64 bits. 
- void sarl_cl(Register dst) { - shift_32(dst, 0x7); - } - - void shl(Register dst, Immediate shift_amount) { - shift(dst, shift_amount, 0x4); - } - - void shl_cl(Register dst) { - shift(dst, 0x4); - } - - void shll_cl(Register dst) { - shift_32(dst, 0x4); - } - - void shll(Register dst, Immediate shift_amount) { - shift_32(dst, shift_amount, 0x4); - } - - void shr(Register dst, Immediate shift_amount) { - shift(dst, shift_amount, 0x5); - } - - void shr_cl(Register dst) { - shift(dst, 0x5); - } - - void shrl_cl(Register dst) { - shift_32(dst, 0x5); - } - - void shrl(Register dst, Immediate shift_amount) { - shift_32(dst, shift_amount, 0x5); - } - void store_rax(void* dst, RelocInfo::Mode mode); void store_rax(ExternalReference ref); @@ -1214,7 +1177,7 @@ class Assembler : public AssemblerBase { void RecordComment(const char* msg, bool force = false); // Allocate a constant pool of the correct size for the generated code. - MaybeObject* AllocateConstantPool(Heap* heap); + Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate); // Generate the constant pool for the generated code. void PopulateConstantPool(ConstantPoolArray* constant_pool); @@ -1425,14 +1388,16 @@ class Assembler : public AssemblerBase { // AND, OR, XOR, or CMP. The encodings of these operations are all // similar, differing just in the opcode or in the reg field of the // ModR/M byte. + void arithmetic_op_8(byte opcode, Register reg, Register rm_reg); + void arithmetic_op_8(byte opcode, Register reg, const Operand& rm_reg); void arithmetic_op_16(byte opcode, Register reg, Register rm_reg); void arithmetic_op_16(byte opcode, Register reg, const Operand& rm_reg); - void arithmetic_op_32(byte opcode, Register reg, Register rm_reg); - void arithmetic_op_32(byte opcode, Register reg, const Operand& rm_reg); - void arithmetic_op(byte opcode, Register reg, Register rm_reg); - void arithmetic_op(byte opcode, Register reg, const Operand& rm_reg); - void immediate_arithmetic_op(byte subcode, Register dst, Immediate src); - void immediate_arithmetic_op(byte subcode, const Operand& dst, Immediate src); + // Operate on operands/registers with pointer size, 32-bit or 64-bit size. + void arithmetic_op(byte opcode, Register reg, Register rm_reg, int size); + void arithmetic_op(byte opcode, + Register reg, + const Operand& rm_reg, + int size); // Operate on a byte in memory or register. void immediate_arithmetic_op_8(byte subcode, Register dst, @@ -1447,20 +1412,20 @@ class Assembler : public AssemblerBase { void immediate_arithmetic_op_16(byte subcode, const Operand& dst, Immediate src); - // Operate on a 32-bit word in memory or register. - void immediate_arithmetic_op_32(byte subcode, - Register dst, - Immediate src); - void immediate_arithmetic_op_32(byte subcode, - const Operand& dst, - Immediate src); + // Operate on operands/registers with pointer size, 32-bit or 64-bit size. + void immediate_arithmetic_op(byte subcode, + Register dst, + Immediate src, + int size); + void immediate_arithmetic_op(byte subcode, + const Operand& dst, + Immediate src, + int size); // Emit machine code for a shift operation. - void shift(Register dst, Immediate shift_amount, int subcode); - void shift_32(Register dst, Immediate shift_amount, int subcode); + void shift(Register dst, Immediate shift_amount, int subcode, int size); // Shift dst by cl % 64 bits. 
- void shift(Register dst, int subcode); - void shift_32(Register dst, int subcode); + void shift(Register dst, int subcode, int size); void emit_farith(int b1, int b2, int i); @@ -1473,138 +1438,63 @@ class Assembler : public AssemblerBase { // Arithmetics void emit_add(Register dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x03, dst, src); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x03, dst, src); - } + arithmetic_op(0x03, dst, src, size); } void emit_add(Register dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x0, dst, src); - } else { - ASSERT(size == kInt32Size); - immediate_arithmetic_op_32(0x0, dst, src); - } + immediate_arithmetic_op(0x0, dst, src, size); } void emit_add(Register dst, const Operand& src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x03, dst, src); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x03, dst, src); - } + arithmetic_op(0x03, dst, src, size); } void emit_add(const Operand& dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x1, src, dst); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x1, src, dst); - } + arithmetic_op(0x1, src, dst, size); } void emit_add(const Operand& dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x0, dst, src); - } else { - ASSERT(size == kInt32Size); - immediate_arithmetic_op_32(0x0, dst, src); - } + immediate_arithmetic_op(0x0, dst, src, size); } void emit_and(Register dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x23, dst, src); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x23, dst, src); - } + arithmetic_op(0x23, dst, src, size); } void emit_and(Register dst, const Operand& src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x23, dst, src); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x23, dst, src); - } + arithmetic_op(0x23, dst, src, size); } void emit_and(const Operand& dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x21, src, dst); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x21, src, dst); - } + arithmetic_op(0x21, src, dst, size); } void emit_and(Register dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x4, dst, src); - } else { - ASSERT(size == kInt32Size); - immediate_arithmetic_op_32(0x4, dst, src); - } + immediate_arithmetic_op(0x4, dst, src, size); } void emit_and(const Operand& dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x4, dst, src); - } else { - ASSERT(size == kInt32Size); - immediate_arithmetic_op_32(0x4, dst, src); - } + immediate_arithmetic_op(0x4, dst, src, size); } void emit_cmp(Register dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x3B, dst, src); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x3B, dst, src); - } + arithmetic_op(0x3B, dst, src, size); } void emit_cmp(Register dst, const Operand& src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x3B, dst, src); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x3B, dst, src); - } + arithmetic_op(0x3B, dst, src, size); } void emit_cmp(const Operand& dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x39, src, dst); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x39, src, dst); - } + arithmetic_op(0x39, src, dst, size); } void emit_cmp(Register dst, Immediate src, int size) { - 
if (size == kInt64Size) { - immediate_arithmetic_op(0x7, dst, src); - } else { - ASSERT(size == kInt32Size); - immediate_arithmetic_op_32(0x7, dst, src); - } + immediate_arithmetic_op(0x7, dst, src, size); } void emit_cmp(const Operand& dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x7, dst, src); - } else { - ASSERT(size == kInt32Size); - immediate_arithmetic_op_32(0x7, dst, src); - } + immediate_arithmetic_op(0x7, dst, src, size); } void emit_dec(Register dst, int size); @@ -1644,99 +1534,49 @@ class Assembler : public AssemblerBase { void emit_not(const Operand& dst, int size); void emit_or(Register dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x0B, dst, src); - } else { - arithmetic_op_32(0x0B, dst, src); - } + arithmetic_op(0x0B, dst, src, size); } void emit_or(Register dst, const Operand& src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x0B, dst, src); - } else { - arithmetic_op_32(0x0B, dst, src); - } + arithmetic_op(0x0B, dst, src, size); } void emit_or(const Operand& dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x9, src, dst); - } else { - arithmetic_op_32(0x9, src, dst); - } + arithmetic_op(0x9, src, dst, size); } void emit_or(Register dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x1, dst, src); - } else { - immediate_arithmetic_op_32(0x1, dst, src); - } + immediate_arithmetic_op(0x1, dst, src, size); } void emit_or(const Operand& dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x1, dst, src); - } else { - immediate_arithmetic_op_32(0x1, dst, src); - } + immediate_arithmetic_op(0x1, dst, src, size); } void emit_repmovs(int size); void emit_sbb(Register dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x1b, dst, src); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x1b, dst, src); - } + arithmetic_op(0x1b, dst, src, size); } void emit_sub(Register dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x2B, dst, src); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x2B, dst, src); - } + arithmetic_op(0x2B, dst, src, size); } void emit_sub(Register dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x5, dst, src); - } else { - ASSERT(size == kInt32Size); - immediate_arithmetic_op_32(0x5, dst, src); - } + immediate_arithmetic_op(0x5, dst, src, size); } void emit_sub(Register dst, const Operand& src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x2B, dst, src); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x2B, dst, src); - } + arithmetic_op(0x2B, dst, src, size); } void emit_sub(const Operand& dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x29, src, dst); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x29, src, dst); - } + arithmetic_op(0x29, src, dst, size); } void emit_sub(const Operand& dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x5, dst, src); - } else { - ASSERT(size == kInt32Size); - immediate_arithmetic_op_32(0x5, dst, src); - } + immediate_arithmetic_op(0x5, dst, src, size); } void emit_test(Register dst, Register src, int size); @@ -1748,52 +1588,29 @@ class Assembler : public AssemblerBase { void emit_xchg(Register dst, Register src, int size); void emit_xor(Register dst, Register src, int size) { - if (size == kInt64Size) { - if (dst.code() == src.code()) { - 
arithmetic_op_32(0x33, dst, src); - } else { - arithmetic_op(0x33, dst, src); - } + if (size == kInt64Size && dst.code() == src.code()) { + // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore + // there is no need to make this a 64 bit operation. + arithmetic_op(0x33, dst, src, kInt32Size); } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x33, dst, src); + arithmetic_op(0x33, dst, src, size); } } void emit_xor(Register dst, const Operand& src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x33, dst, src); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x33, dst, src); - } + arithmetic_op(0x33, dst, src, size); } void emit_xor(Register dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x6, dst, src); - } else { - ASSERT(size == kInt32Size); - immediate_arithmetic_op_32(0x6, dst, src); - } + immediate_arithmetic_op(0x6, dst, src, size); } void emit_xor(const Operand& dst, Immediate src, int size) { - if (size == kInt64Size) { - immediate_arithmetic_op(0x6, dst, src); - } else { - ASSERT(size == kInt32Size); - immediate_arithmetic_op_32(0x6, dst, src); - } + immediate_arithmetic_op(0x6, dst, src, size); } void emit_xor(const Operand& dst, Register src, int size) { - if (size == kInt64Size) { - arithmetic_op(0x31, src, dst); - } else { - ASSERT(size == kInt32Size); - arithmetic_op_32(0x31, src, dst); - } + arithmetic_op(0x31, src, dst, size); } friend class CodePatcher; diff --git a/deps/v8/src/x64/builtins-x64.cc b/deps/v8/src/x64/builtins-x64.cc index d5b1a7386..9e3b89ac6 100644 --- a/deps/v8/src/x64/builtins-x64.cc +++ b/deps/v8/src/x64/builtins-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -163,13 +140,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, if (FLAG_inline_new) { Label undo_allocation; -#ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference debug_step_in_fp = ExternalReference::debug_step_in_fp_address(masm->isolate()); __ Move(kScratchRegister, debug_step_in_fp); __ cmpp(Operand(kScratchRegister, 0), Immediate(0)); __ j(not_equal, &rt_call); -#endif // Verified that the constructor is a JSFunction. // Load the initial map and verify that it is in fact a map. @@ -214,7 +189,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Now allocate the JSObject on the heap. __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); - __ shl(rdi, Immediate(kPointerSizeLog2)); + __ shlp(rdi, Immediate(kPointerSizeLog2)); if (create_memento) { __ addp(rdi, Immediate(AllocationMemento::kSize)); } @@ -600,7 +575,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // No type feedback cell is available __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); // Expects rdi to hold function pointer. - CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); + CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); __ CallStub(&stub); } else { ParameterCount actual(rax); @@ -749,7 +724,7 @@ static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, // Tear down internal frame. } - __ Pop(MemOperand(rsp, 0)); // Ignore state offset + __ DropUnderReturnAddress(1); // Ignore state offset __ ret(0); // Return to IC Miss stub, continuation still on stack. } @@ -926,12 +901,13 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ bind(&shift_arguments); { Label loop; __ movp(rcx, rax); + StackArgumentsAccessor args(rsp, rcx); __ bind(&loop); - __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0)); - __ movp(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx); + __ movp(rbx, args.GetArgumentOperand(1)); + __ movp(args.GetArgumentOperand(0), rbx); __ decp(rcx); - __ j(not_sign, &loop); // While non-negative (to copy return address). - __ popq(rbx); // Discard copy of return address. + __ j(not_zero, &loop); // While non-zero. + __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address. __ decp(rax); // One fewer argument (first argument is new receiver). } @@ -963,9 +939,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { // expected arguments matches what we're providing. If so, jump // (tail-call) to the code in register edx without checking arguments. __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); - __ movsxlq(rbx, - FieldOperand(rdx, - SharedFunctionInfo::kFormalParameterCountOffset)); + __ LoadSharedFunctionInfoSpecialField(rbx, rdx, + SharedFunctionInfo::kFormalParameterCountOffset); __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); __ cmpp(rax, rbx); __ j(not_equal, @@ -1018,7 +993,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { // Out of stack space. __ Push(Operand(rbp, kFunctionOffset)); __ Push(rax); - __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); __ bind(&okay); // End of stack check. 
@@ -1323,6 +1298,32 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { } +static void ArgumentsAdaptorStackCheck(MacroAssembler* masm, + Label* stack_overflow) { + // ----------- S t a t e ------------- + // -- rax : actual number of arguments + // -- rbx : expected number of arguments + // -- rdi: function (passed through to callee) + // ----------------------------------- + // Check the stack for overflow. We are not trying to catch + // interruptions (e.g. debug break and preemption) here, so the "real stack + // limit" is checked. + Label okay; + __ LoadRoot(rdx, Heap::kRealStackLimitRootIndex); + __ movp(rcx, rsp); + // Make rcx the space we have left. The stack might already be overflowed + // here which will cause rcx to become negative. + __ subp(rcx, rdx); + // Make rdx the space we need for the array when it is unrolled onto the + // stack. + __ movp(rdx, rbx); + __ shlp(rdx, Immediate(kPointerSizeLog2)); + // Check if the arguments will overflow the stack. + __ cmpp(rcx, rdx); + __ j(less_equal, stack_overflow); // Signed comparison. +} + + static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { __ pushq(rbp); __ movp(rbp, rsp); @@ -1368,6 +1369,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { Counters* counters = masm->isolate()->counters(); __ IncrementCounter(counters->arguments_adaptors(), 1); + Label stack_overflow; + ArgumentsAdaptorStackCheck(masm, &stack_overflow); + Label enough, too_few; __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); __ cmpp(rax, rbx); @@ -1440,6 +1444,14 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // ------------------------------------------- __ bind(&dont_adapt_arguments); __ jmp(rdx); + + __ bind(&stack_overflow); + { + FrameScope frame(masm, StackFrame::MANUAL); + EnterArgumentsAdaptorFrame(masm); + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); + __ int3(); + } } diff --git a/deps/v8/src/x64/code-stubs-x64.cc b/deps/v8/src/x64/code-stubs-x64.cc index c949a423a..546595ad4 100644 --- a/deps/v8/src/x64/code-stubs-x64.cc +++ b/deps/v8/src/x64/code-stubs-x64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -40,7 +17,6 @@ namespace internal { void FastNewClosureStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rbx }; descriptor->register_param_count_ = 1; @@ -51,7 +27,6 @@ void FastNewClosureStub::InitializeInterfaceDescriptor( void FastNewContextStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rdi }; descriptor->register_param_count_ = 1; @@ -61,7 +36,6 @@ void FastNewContextStub::InitializeInterfaceDescriptor( void ToNumberStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rax }; descriptor->register_param_count_ = 1; @@ -71,7 +45,6 @@ void ToNumberStub::InitializeInterfaceDescriptor( void NumberToStringStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rax }; descriptor->register_param_count_ = 1; @@ -82,7 +55,6 @@ void NumberToStringStub::InitializeInterfaceDescriptor( void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rax, rbx, rcx }; descriptor->register_param_count_ = 3; @@ -94,7 +66,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rax, rbx, rcx, rdx }; descriptor->register_param_count_ = 4; @@ -105,7 +76,6 @@ void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( void CreateAllocationSiteStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rbx, rdx }; descriptor->register_param_count_ = 2; @@ -115,7 +85,6 @@ void CreateAllocationSiteStub::InitializeInterfaceDescriptor( void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rdx, rax }; descriptor->register_param_count_ = 2; @@ -126,7 +95,6 @@ void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rdx, rax }; descriptor->register_param_count_ = 2; @@ -137,7 +105,6 @@ void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( void RegExpConstructResultStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rcx, rbx, rax }; descriptor->register_param_count_ = 3; @@ -148,7 +115,6 @@ void RegExpConstructResultStub::InitializeInterfaceDescriptor( void 
LoadFieldStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rax }; descriptor->register_param_count_ = 1; @@ -158,7 +124,6 @@ void LoadFieldStub::InitializeInterfaceDescriptor( void KeyedLoadFieldStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rdx }; descriptor->register_param_count_ = 1; @@ -168,7 +133,6 @@ void KeyedLoadFieldStub::InitializeInterfaceDescriptor( void StringLengthStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rax, rcx }; descriptor->register_param_count_ = 2; @@ -178,7 +142,6 @@ void StringLengthStub::InitializeInterfaceDescriptor( void KeyedStringLengthStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rdx, rax }; descriptor->register_param_count_ = 2; @@ -188,7 +151,6 @@ void KeyedStringLengthStub::InitializeInterfaceDescriptor( void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rdx, rcx, rax }; descriptor->register_param_count_ = 3; @@ -199,7 +161,6 @@ void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( void TransitionElementsKindStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rax, rbx }; descriptor->register_param_count_ = 2; @@ -210,7 +171,6 @@ void TransitionElementsKindStub::InitializeInterfaceDescriptor( static void InitializeArrayConstructorDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor, int constant_stack_parameter_count) { // register state @@ -239,7 +199,6 @@ static void InitializeArrayConstructorDescriptor( static void InitializeInternalArrayConstructorDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor, int constant_stack_parameter_count) { // register state @@ -267,49 +226,42 @@ static void InitializeInternalArrayConstructorDescriptor( void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, 0); + InitializeArrayConstructorDescriptor(descriptor, 0); } void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, 1); + InitializeArrayConstructorDescriptor(descriptor, 1); } void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeArrayConstructorDescriptor(isolate, descriptor, -1); + InitializeArrayConstructorDescriptor(descriptor, -1); } void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0); + InitializeInternalArrayConstructorDescriptor(descriptor, 0); } void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1); + InitializeInternalArrayConstructorDescriptor(descriptor, 1); } void 
InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1); + InitializeInternalArrayConstructorDescriptor(descriptor, -1); } void CompareNilICStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rax }; descriptor->register_param_count_ = 1; @@ -317,12 +269,11 @@ void CompareNilICStub::InitializeInterfaceDescriptor( descriptor->deoptimization_handler_ = FUNCTION_ADDR(CompareNilIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate())); } void ToBooleanStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rax }; descriptor->register_param_count_ = 1; @@ -330,12 +281,11 @@ void ToBooleanStub::InitializeInterfaceDescriptor( descriptor->deoptimization_handler_ = FUNCTION_ADDR(ToBooleanIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate())); } void StoreGlobalStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rdx, rcx, rax }; descriptor->register_param_count_ = 3; @@ -346,7 +296,6 @@ void StoreGlobalStub::InitializeInterfaceDescriptor( void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rax, rbx, rcx, rdx }; descriptor->register_param_count_ = 4; @@ -357,19 +306,17 @@ void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( void BinaryOpICStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rdx, rax }; descriptor->register_param_count_ = 2; descriptor->register_params_ = registers; descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate)); + ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate())); } void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rcx, rdx, rax }; descriptor->register_param_count_ = 3; @@ -380,7 +327,6 @@ void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor( void StringAddStub::InitializeInterfaceDescriptor( - Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { static Register registers[] = { rdx, rax }; descriptor->register_param_count_ = 2; @@ -479,10 +425,9 @@ void CallDescriptors::InitializeForIsolate(Isolate* isolate) { void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { // Update the static counter each time a new code stub is generated. - Isolate* isolate = masm->isolate(); - isolate->counters()->code_stubs()->Increment(); + isolate()->counters()->code_stubs()->Increment(); - CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); + CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(); int param_count = descriptor->register_param_count_; { // Call the runtime system in a fresh internal frame. 
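A recurring pattern in the code-stubs hunks above is that InitializeInterfaceDescriptor, SetMissHandler and GenerateLightweightMiss stop taking an Isolate* and call isolate() instead. A minimal standalone sketch of that refactoring shape, with invented names and no V8 types, assuming only what the diff shows:

  // Illustration only, not V8 source: the isolate moves from every call site
  // to the constructor, so later entry points no longer need it threaded in.
  class Isolate;

  class OldStyleStub {
   public:
    explicit OldStyleStub(int flags) : flags_(flags) {}
    void GetCode(Isolate* isolate);  // isolate passed at every entry point
   private:
    int flags_;
  };

  class NewStyleStub {
   public:
    NewStyleStub(Isolate* isolate, int flags) : isolate_(isolate), flags_(flags) {}
    void GetCode();                  // uses isolate() internally
   protected:
    Isolate* isolate() const { return isolate_; }
   private:
    Isolate* isolate_;
    int flags_;
  };

The CEntryStub::GenerateAheadOfTime hunk below follows the same shape: CEntryStub stub(isolate, 1, kDontSaveFPRegs); stub.GetCode();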
@@ -506,11 +451,11 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { const int argument_count = 1; __ PrepareCallCFunction(argument_count); __ LoadAddress(arg_reg_1, - ExternalReference::isolate_address(masm->isolate())); + ExternalReference::isolate_address(isolate())); AllowExternalCallThatCantCauseGC scope(masm); __ CallCFunction( - ExternalReference::store_buffer_overflow_function(masm->isolate()), + ExternalReference::store_buffer_overflow_function(isolate()), argument_count); __ PopCallerSaved(save_doubles_); __ ret(0); @@ -871,11 +816,11 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ Cvtlsi2sd(double_exponent, exponent); // Returning or bailing out. - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); if (exponent_type_ == ON_STACK) { // The arguments are still on the stack. __ bind(&call_runtime); - __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); + __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); // The stub is called from non-optimized code, which expects the result // as heap number in rax. @@ -893,7 +838,7 @@ void MathPowStub::Generate(MacroAssembler* masm) { AllowExternalCallThatCantCauseGC scope(masm); __ PrepareCallCFunction(2); __ CallCFunction( - ExternalReference::power_double_double_function(masm->isolate()), 2); + ExternalReference::power_double_double_function(isolate()), 2); } // Return value is in xmm0. __ movsd(double_result, xmm0); @@ -914,7 +859,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) { // -- rdx : receiver // -- rsp[0] : return address // ----------------------------------- - __ Cmp(rax, masm->isolate()->factory()->prototype_string()); + __ Cmp(rax, isolate()->factory()->prototype_string()); __ j(not_equal, &miss); receiver = rdx; } else { @@ -1000,7 +945,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { // rbx: the mapped parameter count (untagged) // rax: the allocated object (tagged). - Factory* factory = masm->isolate()->factory(); + Factory* factory = isolate()->factory(); StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); __ SmiToInteger64(rbx, args.GetArgumentOperand(2)); @@ -1363,11 +1308,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { ARGUMENTS_DONT_CONTAIN_RECEIVER); Label runtime; // Ensure that a RegExp stack is allocated. - Isolate* isolate = masm->isolate(); ExternalReference address_of_regexp_stack_memory_address = - ExternalReference::address_of_regexp_stack_memory_address(isolate); + ExternalReference::address_of_regexp_stack_memory_address(isolate()); ExternalReference address_of_regexp_stack_memory_size = - ExternalReference::address_of_regexp_stack_memory_size(isolate); + ExternalReference::address_of_regexp_stack_memory_size(isolate()); __ Load(kScratchRegister, address_of_regexp_stack_memory_size); __ testp(kScratchRegister, kScratchRegister); __ j(zero, &runtime); @@ -1519,7 +1463,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // rcx: encoding of subject string (1 if ASCII 0 if two_byte); // r11: code // All checks done. Now push arguments for native regexp code. - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); __ IncrementCounter(counters->regexp_entry_native(), 1); // Isolates: note we add an additional parameter here (isolate pointer). @@ -1530,7 +1474,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Argument 9: Pass current isolate address. 
__ LoadAddress(kScratchRegister, - ExternalReference::isolate_address(masm->isolate())); + ExternalReference::isolate_address(isolate())); __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize), kScratchRegister); @@ -1556,8 +1500,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { #endif // Argument 5: static offsets vector buffer. - __ LoadAddress(r8, - ExternalReference::address_of_static_offsets_vector(isolate)); + __ LoadAddress( + r8, ExternalReference::address_of_static_offsets_vector(isolate())); // Argument 5 passed in r8 on Linux and on the stack on Windows. #ifdef _WIN64 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8); @@ -1682,8 +1626,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { kDontSaveFPRegs); // Get the static offsets vector filled by the native regexp code. - __ LoadAddress(rcx, - ExternalReference::address_of_static_offsets_vector(isolate)); + __ LoadAddress( + rcx, ExternalReference::address_of_static_offsets_vector(isolate())); // rbx: last_match_info backing store (FixedArray) // rcx: offsets vector @@ -1716,7 +1660,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // haven't created the exception yet. Handle that in the runtime system. // TODO(592): Rerunning the RegExp to get the stack overflow exception. ExternalReference pending_exception_address( - Isolate::kPendingExceptionAddress, isolate); + Isolate::kPendingExceptionAddress, isolate()); Operand pending_exception_operand = masm->ExternalOperand(pending_exception_address, rbx); __ movp(rax, pending_exception_operand); @@ -1829,7 +1773,7 @@ static void BranchIfNotInternalizedString(MacroAssembler* masm, void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { Label check_unequal_objects, done; Condition cc = GetCondition(); - Factory* factory = masm->isolate()->factory(); + Factory* factory = isolate()->factory(); Label miss; CheckInputType(masm, rdx, left_, &miss); @@ -2163,7 +2107,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ Push(rdx); __ Push(rbx); - CreateAllocationSiteStub create_stub; + CreateAllocationSiteStub create_stub(isolate); __ CallStub(&create_stub); __ Pop(rbx); @@ -2197,14 +2141,77 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { } +static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { + // Do not transform the receiver for strict mode functions. + __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); + __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset), + Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); + __ j(not_equal, cont); + + // Do not transform the receiver for natives. + // SharedFunctionInfo is already loaded into rcx. + __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset), + Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); + __ j(not_equal, cont); +} + + +static void EmitSlowCase(Isolate* isolate, + MacroAssembler* masm, + StackArgumentsAccessor* args, + int argc, + Label* non_function) { + // Check for function proxy. 
+ __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); + __ j(not_equal, non_function); + __ PopReturnAddressTo(rcx); + __ Push(rdi); // put proxy as additional argument under return address + __ PushReturnAddressFrom(rcx); + __ Set(rax, argc + 1); + __ Set(rbx, 0); + __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); + { + Handle<Code> adaptor = + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); + __ jmp(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). + __ bind(non_function); + __ movp(args->GetReceiverOperand(), rdi); + __ Set(rax, argc); + __ Set(rbx, 0); + __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); + Handle<Code> adaptor = + isolate->builtins()->ArgumentsAdaptorTrampoline(); + __ Jump(adaptor, RelocInfo::CODE_TARGET); +} + + +static void EmitWrapCase(MacroAssembler* masm, + StackArgumentsAccessor* args, + Label* cont) { + // Wrap the receiver and patch it back onto the stack. + { FrameScope frame_scope(masm, StackFrame::INTERNAL); + __ Push(rdi); + __ Push(rax); + __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); + __ Pop(rdi); + } + __ movp(args->GetReceiverOperand(), rax); + __ jmp(cont); +} + + void CallFunctionStub::Generate(MacroAssembler* masm) { - // rbx : feedback vector - // rdx : (only if rbx is not the megamorphic symbol) slot in feedback - // vector (Smi) // rdi : the function to call + + // wrap_and_call can only be true if we are compiling a monomorphic method. Isolate* isolate = masm->isolate(); Label slow, non_function, wrap, cont; - StackArgumentsAccessor args(rsp, argc_); + int argc = argc_; + StackArgumentsAccessor args(rsp, argc); if (NeedsChecks()) { // Check that the function really is a JavaScript function. @@ -2213,35 +2220,16 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // Goto slow case if we do not have a function. __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); __ j(not_equal, &slow); - - if (RecordCallTarget()) { - GenerateRecordCallTarget(masm); - // Type information was updated. Because we may call Array, which - // expects either undefined or an AllocationSite in rbx we need - // to set rbx to undefined. - __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); - } } // Fast-case: Just invoke the function. - ParameterCount actual(argc_); + ParameterCount actual(argc); if (CallAsMethod()) { if (NeedsChecks()) { - // Do not transform the receiver for strict mode functions. - __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); - __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset), - Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); - __ j(not_equal, &cont); - - // Do not transform the receiver for natives. - // SharedFunctionInfo is already loaded into rcx. - __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset), - Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); - __ j(not_equal, &cont); + EmitContinueIfStrictOrNative(masm, &cont); } - // Load the receiver from the stack. __ movp(rax, args.GetReceiverOperand()); @@ -2256,59 +2244,18 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { __ bind(&cont); } + __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); if (NeedsChecks()) { // Slow-case: Non-function called. __ bind(&slow); - if (RecordCallTarget()) { - // If there is a call target cache, mark it megamorphic in the - // non-function case. 
MegamorphicSentinel is an immortal immovable - // object (megamorphic symbol) so no write barrier is needed. - __ SmiToInteger32(rdx, rdx); - __ Move(FieldOperand(rbx, rdx, times_pointer_size, - FixedArray::kHeaderSize), - TypeFeedbackInfo::MegamorphicSentinel(isolate)); - __ Integer32ToSmi(rdx, rdx); - } - // Check for function proxy. - __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); - __ j(not_equal, &non_function); - __ PopReturnAddressTo(rcx); - __ Push(rdi); // put proxy as additional argument under return address - __ PushReturnAddressFrom(rcx); - __ Set(rax, argc_ + 1); - __ Set(rbx, 0); - __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); - { - Handle<Code> adaptor = - masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); - __ jmp(adaptor, RelocInfo::CODE_TARGET); - } - - // CALL_NON_FUNCTION expects the non-function callee as receiver (instead - // of the original receiver from the call site). - __ bind(&non_function); - __ movp(args.GetReceiverOperand(), rdi); - __ Set(rax, argc_); - __ Set(rbx, 0); - __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); - Handle<Code> adaptor = - isolate->builtins()->ArgumentsAdaptorTrampoline(); - __ Jump(adaptor, RelocInfo::CODE_TARGET); + EmitSlowCase(isolate, masm, &args, argc, &non_function); } if (CallAsMethod()) { __ bind(&wrap); - // Wrap the receiver and patch it back onto the stack. - { FrameScope frame_scope(masm, StackFrame::INTERNAL); - __ Push(rdi); - __ Push(rax); - __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); - __ Pop(rdi); - } - __ movp(args.GetReceiverOperand(), rax); - __ jmp(&cont); + EmitWrapCase(masm, &args, &cont); } } @@ -2374,11 +2321,125 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ bind(&do_call); // Set expected number of arguments to zero (not changing rax). __ Set(rbx, 0); - __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), RelocInfo::CODE_TARGET); } +static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { + __ movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); + __ movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset)); + __ movp(vector, FieldOperand(vector, + SharedFunctionInfo::kFeedbackVectorOffset)); +} + + +void CallICStub::Generate(MacroAssembler* masm) { + // rdi - function + // rbx - vector + // rdx - slot id + Isolate* isolate = masm->isolate(); + Label extra_checks_or_miss, slow_start; + Label slow, non_function, wrap, cont; + Label have_js_function; + int argc = state_.arg_count(); + StackArgumentsAccessor args(rsp, argc); + ParameterCount actual(argc); + + EmitLoadTypeFeedbackVector(masm, rbx); + + // The checks. First, does rdi match the recorded monomorphic target? + __ SmiToInteger32(rdx, rdx); + __ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size, + FixedArray::kHeaderSize)); + __ j(not_equal, &extra_checks_or_miss); + + __ bind(&have_js_function); + if (state_.CallAsMethod()) { + EmitContinueIfStrictOrNative(masm, &cont); + + // Load the receiver from the stack. 
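// Sketched in ordinary C++, with hypothetical stand-in types: the receiver
// handling on the CallAsMethod path, restating the control flow of
// EmitContinueIfStrictOrNative and EmitWrapCase above.
struct CalleeInfo { bool is_strict; bool is_native; };       // stand-in

static bool ReceiverNeedsWrapping(const CalleeInfo& callee,
                                  bool receiver_is_smi,
                                  bool receiver_is_spec_object) {
  // Strict-mode and native callees take the receiver as-is.
  if (callee.is_strict || callee.is_native) return false;
  // Everything else gets ToObject applied to a Smi or non-object receiver.
  return receiver_is_smi || !receiver_is_spec_object;
}
// The stub loads the receiver next and jumps to the wrap path exactly when
// this predicate would return true.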
+ __ movp(rax, args.GetReceiverOperand()); + + __ JumpIfSmi(rax, &wrap); + + __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); + __ j(below, &wrap); + + __ bind(&cont); + } + + __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); + + __ bind(&slow); + EmitSlowCase(isolate, masm, &args, argc, &non_function); + + if (state_.CallAsMethod()) { + __ bind(&wrap); + EmitWrapCase(masm, &args, &cont); + } + + __ bind(&extra_checks_or_miss); + Label miss; + + __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, + FixedArray::kHeaderSize)); + __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate)); + __ j(equal, &slow_start); + __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate)); + __ j(equal, &miss); + + if (!FLAG_trace_ic) { + // We are going megamorphic, and we don't want to visit the runtime. + __ Move(FieldOperand(rbx, rdx, times_pointer_size, + FixedArray::kHeaderSize), + TypeFeedbackInfo::MegamorphicSentinel(isolate)); + __ jmp(&slow_start); + } + + // We are here because tracing is on or we are going monomorphic. + __ bind(&miss); + GenerateMiss(masm); + + // the slow case + __ bind(&slow_start); + // Check that function is not a smi. + __ JumpIfSmi(rdi, &non_function); + // Check that function is a JSFunction. + __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); + __ j(not_equal, &slow); + __ jmp(&have_js_function); + + // Unreachable + __ int3(); +} + + +void CallICStub::GenerateMiss(MacroAssembler* masm) { + // Get the receiver of the function from the stack; 1 ~ return address. + __ movp(rcx, Operand(rsp, (state_.arg_count() + 1) * kPointerSize)); + + { + FrameScope scope(masm, StackFrame::INTERNAL); + + // Push the receiver and the function and feedback info. + __ Push(rcx); + __ Push(rdi); + __ Push(rbx); + __ Integer32ToSmi(rdx, rdx); + __ Push(rdx); + + // Call the entry. + ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), + masm->isolate()); + __ CallExternalReference(miss, 4); + + // Move result to edi and exit the internal frame. + __ movp(rdi, rax); + } +} + + bool CEntryStub::NeedsImmovableCode() { return false; } @@ -2401,26 +2462,35 @@ void CodeStub::GenerateFPStubs(Isolate* isolate) { void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { - CEntryStub stub(1, kDontSaveFPRegs); - stub.GetCode(isolate); - CEntryStub save_doubles(1, kSaveFPRegs); - save_doubles.GetCode(isolate); + CEntryStub stub(isolate, 1, kDontSaveFPRegs); + stub.GetCode(); + CEntryStub save_doubles(isolate, 1, kSaveFPRegs); + save_doubles.GetCode(); } -void CEntryStub::GenerateCore(MacroAssembler* masm, - Label* throw_normal_exception, - Label* throw_termination_exception, - bool do_gc, - bool always_allocate_scope) { - // rax: result parameter for PerformGC, if any. - // rbx: pointer to C function (C callee-saved). - // rbp: frame pointer (restored after C call). - // rsp: stack pointer (restored after C call). +void CEntryStub::Generate(MacroAssembler* masm) { + // rax: number of arguments including receiver + // rbx: pointer to C function (C callee-saved) + // rbp: frame pointer of calling JS frame (restored after C call) + // rsp: stack pointer (restored after C call) + // rsi: current context (restored) + + ProfileEntryHookStub::MaybeCallEntryHook(masm); + + // Enter the exit frame that transitions from JavaScript to C++. +#ifdef _WIN64 + int arg_stack_space = (result_size_ < 2 ? 2 : 4); +#else + int arg_stack_space = 0; +#endif + __ EnterExitFrame(arg_stack_space, save_doubles_); + + // rbx: pointer to builtin function (C callee-saved). 
+ // rbp: frame pointer of exit frame (restored after C call). + // rsp: stack pointer (restored after C call). // r14: number of arguments including receiver (C callee-saved). - // r15: pointer to the first argument (C callee-saved). - // This pointer is reused in LeaveExitFrame(), so it is stored in a - // callee-saved register. + // r15: argv pointer (C callee-saved). // Simple results returned in rax (both AMD64 and Win64 calling conventions). // Complex results must be written to address passed as first argument. @@ -2431,25 +2501,6 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ CheckStackAlignment(); } - if (do_gc) { - // Pass failure code returned from last attempt as first argument to - // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the - // stack is known to be aligned. This function takes one argument which is - // passed in register. - __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate())); - __ movp(arg_reg_1, rax); - __ Move(kScratchRegister, - ExternalReference::perform_gc_function(masm->isolate())); - __ call(kScratchRegister); - } - - ExternalReference scope_depth = - ExternalReference::heap_always_allocate_scope_depth(masm->isolate()); - if (always_allocate_scope) { - Operand scope_depth_operand = masm->ExternalOperand(scope_depth); - __ incl(scope_depth_operand); - } - // Call C function. #ifdef _WIN64 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. @@ -2460,7 +2511,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, // Return result in single register (rax). __ movp(rcx, r14); // argc. __ movp(rdx, r15); // argv. - __ Move(r8, ExternalReference::isolate_address(masm->isolate())); + __ Move(r8, ExternalReference::isolate_address(isolate())); } else { ASSERT_EQ(2, result_size_); // Pass a pointer to the result location as the first argument. @@ -2468,26 +2519,18 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, // Pass a pointer to the Arguments object as the second argument. __ movp(rdx, r14); // argc. __ movp(r8, r15); // argv. - __ Move(r9, ExternalReference::isolate_address(masm->isolate())); + __ Move(r9, ExternalReference::isolate_address(isolate())); } #else // _WIN64 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9. __ movp(rdi, r14); // argc. __ movp(rsi, r15); // argv. - __ Move(rdx, ExternalReference::isolate_address(masm->isolate())); + __ Move(rdx, ExternalReference::isolate_address(isolate())); #endif __ call(rbx); // Result is in rax - do not destroy this register! - if (always_allocate_scope) { - Operand scope_depth_operand = masm->ExternalOperand(scope_depth); - __ decl(scope_depth_operand); - } - - // Check for failure result. - Label failure_returned; - STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); #ifdef _WIN64 // If return value is on the stack, pop it to registers. if (result_size_ > 1) { @@ -2499,121 +2542,65 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ movq(rdx, Operand(rsp, 7 * kRegisterSize)); } #endif - __ leap(rcx, Operand(rax, 1)); - // Lower 2 bits of rcx are 0 iff rax has failure tag. - __ testl(rcx, Immediate(kFailureTagMask)); - __ j(zero, &failure_returned); + + // Runtime functions should not return 'the hole'. Allowing it to escape may + // lead to crashes in the IC code later. + if (FLAG_debug_code) { + Label okay; + __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); + __ j(not_equal, &okay, Label::kNear); + __ int3(); + __ bind(&okay); + } + + // Check result for exception sentinel. 
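// A plain-C++ sketch of the convention the rewritten CEntryStub relies on:
// a runtime function returns either a real value or a single exception
// sentinel, and the exception object itself sits in a per-isolate pending
// exception slot. Every name below is a hypothetical stand-in, not V8 API.
struct StandInIsolate {
  const void* pending_exception;
  const void* the_hole;             // "no pending exception" marker
  const void* exception_sentinel;   // what failed runtime calls return
};

// Returns true when the caller must unwind; *out receives the exception.
static bool RuntimeCallFailed(StandInIsolate* iso, const void* result,
                              const void** out) {
  if (result != iso->exception_sentinel) return false;    // normal result
  *out = iso->pending_exception;                           // fetch the payload
  iso->pending_exception = iso->the_hole;                  // clear the slot
  return true;
}
// Termination exceptions still get a separate path in the stub because they
// must stay uncatchable from JavaScript; the sentinel comparison follows.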
+ Label exception_returned; + __ CompareRoot(rax, Heap::kExceptionRootIndex); + __ j(equal, &exception_returned); + + ExternalReference pending_exception_address( + Isolate::kPendingExceptionAddress, isolate()); + + // Check that there is no pending exception, otherwise we + // should have returned the exception sentinel. + if (FLAG_debug_code) { + Label okay; + __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); + Operand pending_exception_operand = + masm->ExternalOperand(pending_exception_address); + __ cmpp(r14, pending_exception_operand); + __ j(equal, &okay, Label::kNear); + __ int3(); + __ bind(&okay); + } // Exit the JavaScript to C++ exit frame. __ LeaveExitFrame(save_doubles_); __ ret(0); - // Handling of failure. - __ bind(&failure_returned); - - Label retry; - // If the returned exception is RETRY_AFTER_GC continue at retry label - STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); - __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); - __ j(zero, &retry, Label::kNear); + // Handling of exception. + __ bind(&exception_returned); // Retrieve the pending exception. - ExternalReference pending_exception_address( - Isolate::kPendingExceptionAddress, masm->isolate()); Operand pending_exception_operand = masm->ExternalOperand(pending_exception_address); __ movp(rax, pending_exception_operand); // Clear the pending exception. - pending_exception_operand = - masm->ExternalOperand(pending_exception_address); __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); __ movp(pending_exception_operand, rdx); // Special handling of termination exceptions which are uncatchable // by javascript code. + Label throw_termination_exception; __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); - __ j(equal, throw_termination_exception); + __ j(equal, &throw_termination_exception); // Handle normal exception. - __ jmp(throw_normal_exception); - - // Retry. - __ bind(&retry); -} - - -void CEntryStub::Generate(MacroAssembler* masm) { - // rax: number of arguments including receiver - // rbx: pointer to C function (C callee-saved) - // rbp: frame pointer of calling JS frame (restored after C call) - // rsp: stack pointer (restored after C call) - // rsi: current context (restored) - - // NOTE: Invocations of builtins may return failure objects - // instead of a proper result. The builtin entry handles - // this by performing a garbage collection and retrying the - // builtin once. - - ProfileEntryHookStub::MaybeCallEntryHook(masm); - - // Enter the exit frame that transitions from JavaScript to C++. -#ifdef _WIN64 - int arg_stack_space = (result_size_ < 2 ? 2 : 4); -#else - int arg_stack_space = 0; -#endif - __ EnterExitFrame(arg_stack_space, save_doubles_); - - // rax: Holds the context at this point, but should not be used. - // On entry to code generated by GenerateCore, it must hold - // a failure result if the collect_garbage argument to GenerateCore - // is true. This failure result can be the result of code - // generated by a previous call to GenerateCore. The value - // of rax is then passed to Runtime::PerformGC. - // rbx: pointer to builtin function (C callee-saved). - // rbp: frame pointer of exit frame (restored after C call). - // rsp: stack pointer (restored after C call). - // r14: number of arguments including receiver (C callee-saved). - // r15: argv pointer (C callee-saved). - - Label throw_normal_exception; - Label throw_termination_exception; - - // Call into the runtime system. 
- GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - false, - false); - - // Do space-specific GC and retry runtime call. - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - true, - false); - - // Do full GC and retry runtime call one final time. - Failure* failure = Failure::InternalError(); - __ Move(rax, failure, Assembler::RelocInfoNone()); - GenerateCore(masm, - &throw_normal_exception, - &throw_termination_exception, - true, - true); - - { FrameScope scope(masm, StackFrame::MANUAL); - __ PrepareCallCFunction(0); - __ CallCFunction( - ExternalReference::out_of_memory_function(masm->isolate()), 0); - } + __ Throw(rax); __ bind(&throw_termination_exception); __ ThrowUncatchable(rax); - - __ bind(&throw_normal_exception); - __ Throw(rax); } @@ -2669,17 +2656,15 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { __ InitializeRootRegister(); } - Isolate* isolate = masm->isolate(); - // Save copies of the top frame descriptor on the stack. - ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate); + ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate()); { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); __ Push(c_entry_fp_operand); } // If this is the outermost JS call, set js_entry_sp value. - ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); + ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate()); __ Load(rax, js_entry_sp); __ testp(rax, rax); __ j(not_zero, ¬_outermost_js); @@ -2700,9 +2685,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Caught exception: Store result (exception) in the pending exception // field in the JSEnv and return a failure sentinel. ExternalReference pending_exception(Isolate::kPendingExceptionAddress, - isolate); + isolate()); __ Store(pending_exception, rax); - __ Move(rax, Failure::Exception(), Assembler::RelocInfoNone()); + __ LoadRoot(rax, Heap::kExceptionRootIndex); __ jmp(&exit); // Invoke: Link this frame into the handler chain. There's only one @@ -2724,10 +2709,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // at the time this code is generated. if (is_construct) { ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, - isolate); + isolate()); __ Load(rax, construct_entry); } else { - ExternalReference entry(Builtins::kJSEntryTrampoline, isolate); + ExternalReference entry(Builtins::kJSEntryTrampoline, isolate()); __ Load(rax, entry); } __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); @@ -2800,17 +2785,19 @@ void InstanceofStub::Generate(MacroAssembler* masm) { // indicate that the value is not an instance. static const int kOffsetToMapCheckValue = 2; - static const int kOffsetToResultValue = 18; + static const int kOffsetToResultValue = kPointerSize == kInt64Size ? 18 : 14; // The last 4 bytes of the instruction sequence - // movq(rdi, FieldOperand(rax, HeapObject::kMapOffset)) + // movp(rdi, FieldOperand(rax, HeapObject::kMapOffset)) // Move(kScratchRegister, Factory::the_hole_value()) // in front of the hole value address. - static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78; + static const unsigned int kWordBeforeMapCheckValue = + kPointerSize == kInt64Size ? 
0xBA49FF78 : 0xBA41FF78; // The last 4 bytes of the instruction sequence // __ j(not_equal, &cache_miss); // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); // before the offset of the hole value in the root array. - static const unsigned int kWordBeforeResultValue = 0x458B4906; + static const unsigned int kWordBeforeResultValue = + kPointerSize == kInt64Size ? 0x458B4906 : 0x458B4106; // Only the inline check flag is supported on X64. ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck()); int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0; @@ -3121,7 +3108,8 @@ void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm, // Copy from edi to esi using rep movs instruction. __ movl(kScratchRegister, count); - __ shr(count, Immediate(kPointerSizeLog2)); // Number of doublewords to copy. + // Number of doublewords to copy. + __ shrl(count, Immediate(kPointerSizeLog2)); __ repmovsp(); // Find number of bytes left. @@ -3248,7 +3236,7 @@ void SubStringStub::Generate(MacroAssembler* masm) { // Longer than original string's length or negative: unsafe arguments. __ j(above, &runtime); // Return original string. - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); __ IncrementCounter(counters->sub_string_native(), 1); __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); __ bind(¬_original_string); @@ -3593,7 +3581,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) { __ cmpp(rdx, rax); __ j(not_equal, ¬_same, Label::kNear); __ Move(rax, Smi::FromInt(EQUAL)); - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); __ IncrementCounter(counters->string_compare_native(), 1); __ ret(2 * kPointerSize); @@ -3617,227 +3605,30 @@ void StringCompareStub::Generate(MacroAssembler* masm) { } -void ArrayPushStub::Generate(MacroAssembler* masm) { - int argc = arguments_count(); - - StackArgumentsAccessor args(rsp, argc); - if (argc == 0) { - // Noop, return the length. - __ movp(rax, FieldOperand(rdx, JSArray::kLengthOffset)); - __ ret((argc + 1) * kPointerSize); - return; - } - - Isolate* isolate = masm->isolate(); - - if (argc != 1) { - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - Label call_builtin, attempt_to_grow_elements, with_write_barrier; - - // Get the elements array of the object. - __ movp(rdi, FieldOperand(rdx, JSArray::kElementsOffset)); - - if (IsFastSmiOrObjectElementsKind(elements_kind())) { - // Check that the elements are in fast mode and writable. - __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset), - isolate->factory()->fixed_array_map()); - __ j(not_equal, &call_builtin); - } - - // Get the array's length into rax and calculate new length. - __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset)); - STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); - __ addl(rax, Immediate(argc)); - - // Get the elements' length into rcx. - __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset)); - - // Check if we could survive without allocation. - __ cmpl(rax, rcx); - - if (IsFastSmiOrObjectElementsKind(elements_kind())) { - __ j(greater, &attempt_to_grow_elements); - - // Check if value is a smi. - __ movp(rcx, args.GetArgumentOperand(1)); - __ JumpIfNotSmi(rcx, &with_write_barrier); - - // Store the value. 
- __ movp(FieldOperand(rdi, - rax, - times_pointer_size, - FixedArray::kHeaderSize - argc * kPointerSize), - rcx); - } else { - __ j(greater, &call_builtin); - - __ movp(rcx, args.GetArgumentOperand(1)); - __ StoreNumberToDoubleElements( - rcx, rdi, rax, xmm0, &call_builtin, argc * kDoubleSize); - } - - // Save new length. - __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax); - - __ Integer32ToSmi(rax, rax); // Return new length as smi. - __ ret((argc + 1) * kPointerSize); - - if (IsFastDoubleElementsKind(elements_kind())) { - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - __ bind(&with_write_barrier); - - if (IsFastSmiElementsKind(elements_kind())) { - if (FLAG_trace_elements_transitions) __ jmp(&call_builtin); - - __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), - isolate->factory()->heap_number_map()); - __ j(equal, &call_builtin); - - ElementsKind target_kind = IsHoleyElementsKind(elements_kind()) - ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; - __ movp(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX)); - __ movp(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset)); - __ movp(rbx, ContextOperand(rbx, Context::JS_ARRAY_MAPS_INDEX)); - const int header_size = FixedArrayBase::kHeaderSize; - // Verify that the object can be transitioned in place. - const int origin_offset = header_size + elements_kind() * kPointerSize; - __ movp(rdi, FieldOperand(rbx, origin_offset)); - __ cmpp(rdi, FieldOperand(rdx, HeapObject::kMapOffset)); - __ j(not_equal, &call_builtin); - - const int target_offset = header_size + target_kind * kPointerSize; - __ movp(rbx, FieldOperand(rbx, target_offset)); - ElementsTransitionGenerator::GenerateMapChangeElementsTransition( - masm, DONT_TRACK_ALLOCATION_SITE, NULL); - __ movp(rdi, FieldOperand(rdx, JSArray::kElementsOffset)); - } - - // Save new length. - __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax); - - // Store the value. - __ leap(rdx, FieldOperand(rdi, - rax, times_pointer_size, - FixedArray::kHeaderSize - argc * kPointerSize)); - __ movp(Operand(rdx, 0), rcx); - - __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - - __ Integer32ToSmi(rax, rax); // Return new length as smi. - __ ret((argc + 1) * kPointerSize); - - __ bind(&attempt_to_grow_elements); - if (!FLAG_inline_new) { - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); - return; - } - - __ movp(rbx, args.GetArgumentOperand(1)); - // Growing elements that are SMI-only requires special handling in case the - // new element is non-Smi. For now, delegate to the builtin. - Label no_fast_elements_check; - __ JumpIfSmi(rbx, &no_fast_elements_check); - __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); - __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar); - __ bind(&no_fast_elements_check); - - ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(isolate); - ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(isolate); - - const int kAllocationDelta = 4; - ASSERT(kAllocationDelta >= argc); - // Load top. - __ Load(rcx, new_space_allocation_top); - - // Check if it's the end of elements. 
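// A short C++ sketch of the trick used by this (now deleted) grow path: if
// the elements backing store is the most recently allocated object in new
// space, it can be extended by bumping the allocation top, provided the
// limit is not exceeded. The structure below is a hypothetical stand-in.
#include <cstddef>
#include <cstdint>

struct StandInNewSpace {
  uintptr_t allocation_top;     // end of the last object allocated
  uintptr_t allocation_limit;   // end of the current new-space page
};

static bool TryGrowInPlace(StandInNewSpace* space, uintptr_t object_end,
                           size_t extra_bytes) {
  if (object_end != space->allocation_top) return false;  // not the last object
  if (space->allocation_limit - space->allocation_top < extra_bytes)
    return false;                                          // no room left
  space->allocation_top += extra_bytes;                    // claim the memory
  return true;
}
// The deleted assembly performs the same compare-against-top, check-limit,
// store-new-top sequence before filling the fresh slots with holes.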
- __ leap(rdx, FieldOperand(rdi, - rax, times_pointer_size, - FixedArray::kHeaderSize - argc * kPointerSize)); - __ cmpp(rdx, rcx); - __ j(not_equal, &call_builtin); - __ addp(rcx, Immediate(kAllocationDelta * kPointerSize)); - Operand limit_operand = masm->ExternalOperand(new_space_allocation_limit); - __ cmpp(rcx, limit_operand); - __ j(above, &call_builtin); - - // We fit and could grow elements. - __ Store(new_space_allocation_top, rcx); - - // Push the argument... - __ movp(Operand(rdx, 0), rbx); - // ... and fill the rest with holes. - __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); - for (int i = 1; i < kAllocationDelta; i++) { - __ movp(Operand(rdx, i * kPointerSize), kScratchRegister); - } - - if (IsFastObjectElementsKind(elements_kind())) { - // We know the elements array is in new space so we don't need the - // remembered set, but we just pushed a value onto it so we may have to tell - // the incremental marker to rescan the object that we just grew. We don't - // need to worry about the holes because they are in old space and already - // marked black. - __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET); - } - - // Restore receiver to rdx as finish sequence assumes it's here. - __ movp(rdx, args.GetReceiverOperand()); - - // Increment element's and array's sizes. - __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset), - Smi::FromInt(kAllocationDelta)); - - // Make new length a smi before returning it. - __ Integer32ToSmi(rax, rax); - __ movp(FieldOperand(rdx, JSArray::kLengthOffset), rax); - - __ ret((argc + 1) * kPointerSize); - - __ bind(&call_builtin); - __ TailCallExternalReference( - ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); -} - - void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- rdx : left // -- rax : right // -- rsp[0] : return address // ----------------------------------- - Isolate* isolate = masm->isolate(); // Load rcx with the allocation site. We stick an undefined dummy value here // and replace it with the real allocation site later when we instantiate this // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). - __ Move(rcx, handle(isolate->heap()->undefined_value())); + __ Move(rcx, handle(isolate()->heap()->undefined_value())); // Make sure that we actually patched the allocation site. if (FLAG_debug_code) { __ testb(rcx, Immediate(kSmiTagMask)); __ Assert(not_equal, kExpectedAllocationSite); __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), - isolate->factory()->allocation_site_map()); + isolate()->factory()->allocation_site_map()); __ Assert(equal, kExpectedAllocationSite); } // Tail call into the stub that handles binary operations with allocation // sites. - BinaryOpWithAllocationSiteStub stub(state_); + BinaryOpWithAllocationSiteStub stub(isolate(), state_); __ TailCallStub(&stub); } @@ -3883,7 +3674,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { // Load left and right operand. 
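// A minimal C++ sketch of the NUMBER state of the compare IC, with stand-in
// types: each operand must be a Smi or a HeapNumber, both are widened to
// double, and anything else (or an unordered NaN compare) leaves the fast
// path, mirroring the jumps to the miss / generic handlers in this stub.
enum class NumTag { kSmi, kHeapNumber, kOther };             // stand-in
struct TaggedNumber { NumTag tag; double value; };           // stand-in

static bool FastCompareNumbers(TaggedNumber l, TaggedNumber r, int* result) {
  if (l.tag == NumTag::kOther || r.tag == NumTag::kOther) return false;
  if (l.value != l.value || r.value != r.value) return false;  // NaN: unordered
  *result = (l.value < r.value) ? -1 : (l.value > r.value) ? 1 : 0;
  return true;
}
// The MacroAssembler version below does the same with SSE2 compares and a
// chain of tag checks.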
Label done, left, left_smi, right_smi; __ JumpIfSmi(rax, &right_smi, Label::kNear); - __ CompareMap(rax, masm->isolate()->factory()->heap_number_map()); + __ CompareMap(rax, isolate()->factory()->heap_number_map()); __ j(not_equal, &maybe_undefined1, Label::kNear); __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); __ jmp(&left, Label::kNear); @@ -3893,7 +3684,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { __ bind(&left); __ JumpIfSmi(rdx, &left_smi, Label::kNear); - __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map()); + __ CompareMap(rdx, isolate()->factory()->heap_number_map()); __ j(not_equal, &maybe_undefined2, Label::kNear); __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); __ jmp(&done); @@ -3918,13 +3709,13 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { __ bind(&unordered); __ bind(&generic_stub); - ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, + ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC, CompareIC::GENERIC); - __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); + __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); __ bind(&maybe_undefined1); if (Token::IsOrderedRelationalCompareOp(op_)) { - __ Cmp(rax, masm->isolate()->factory()->undefined_value()); + __ Cmp(rax, isolate()->factory()->undefined_value()); __ j(not_equal, &miss); __ JumpIfSmi(rdx, &unordered); __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); @@ -3934,7 +3725,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { __ bind(&maybe_undefined2); if (Token::IsOrderedRelationalCompareOp(op_)) { - __ Cmp(rdx, masm->isolate()->factory()->undefined_value()); + __ Cmp(rdx, isolate()->factory()->undefined_value()); __ j(equal, &unordered); } @@ -4160,7 +3951,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) { { // Call the runtime system in a fresh internal frame. ExternalReference miss = - ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); + ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate()); FrameScope scope(masm, StackFrame::INTERNAL); __ Push(rdx); @@ -4233,7 +4024,8 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, __ bind(&good); } - NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); + NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0, + NEGATIVE_LOOKUP); __ Push(Handle<Object>(name)); __ Push(Immediate(name->Hash())); __ CallStub(&stub); @@ -4283,7 +4075,8 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, __ j(equal, done); } - NameDictionaryLookupStub stub(elements, r0, r1, POSITIVE_LOOKUP); + NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1, + POSITIVE_LOOKUP); __ Push(name); __ movl(r0, FieldOperand(name, Name::kHashFieldOffset)); __ shrl(r0, Immediate(Name::kHashShift)); @@ -4344,7 +4137,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { times_pointer_size, kElementsStartOffset - kHeapObjectTag)); - __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); + __ Cmp(scratch, isolate()->factory()->undefined_value()); __ j(equal, ¬_in_dictionary); // Stop if found the property. 
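// A plain-C++ sketch of the probing scheme these dictionary lookup stubs
// implement: a power-of-two table probed with triangular-number offsets,
// where hitting an undefined key ends the search. The container below is a
// hypothetical stand-in for V8's NameDictionary layout, and the generated
// stubs only attempt a bounded number of probes before falling back to the
// runtime.
#include <cstdint>
#include <string>
#include <vector>

struct StandInEntry { bool is_undefined; std::string key; };

static int FindEntry(const std::vector<StandInEntry>& table,  // size is 2^n
                     const std::string& name, uint32_t hash) {
  const uint32_t mask = static_cast<uint32_t>(table.size()) - 1;
  for (uint32_t i = 0; i < table.size(); ++i) {
    // Offsets 0, 1, 3, 6, 10, ... visit every slot of a power-of-two table.
    uint32_t index = (hash + ((i + i * i) >> 1)) & mask;
    if (table[index].is_undefined) return -1;   // hole: name is not present
    if (table[index].key == name) return static_cast<int>(index);
  }
  return -1;  // every probed slot collided; treated as "not found"
}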
@@ -4387,10 +4180,10 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( Isolate* isolate) { - StoreBufferOverflowStub stub1(kDontSaveFPRegs); - stub1.GetCode(isolate); - StoreBufferOverflowStub stub2(kSaveFPRegs); - stub2.GetCode(isolate); + StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs); + stub1.GetCode(); + StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); + stub2.GetCode(); } @@ -4489,14 +4282,13 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { // TODO(gc) Can we just set address arg2 in the beginning? __ Move(arg_reg_2, address); __ LoadAddress(arg_reg_3, - ExternalReference::isolate_address(masm->isolate())); + ExternalReference::isolate_address(isolate())); int argument_count = 3; AllowExternalCallThatCantCauseGC scope(masm); __ PrepareCallCFunction(argument_count); __ CallCFunction( - ExternalReference::incremental_marking_record_write_function( - masm->isolate()), + ExternalReference::incremental_marking_record_write_function(isolate()), argument_count); regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); } @@ -4674,8 +4466,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { - CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); - __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); + CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); + __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); int parameter_count_offset = StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; __ movp(rbx, MemOperand(rbp, parameter_count_offset)); @@ -4691,7 +4483,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { if (masm->isolate()->function_entry_hook() != NULL) { - ProfileEntryHookStub stub; + ProfileEntryHookStub stub(masm->isolate()); masm->CallStub(&stub); } } @@ -4716,7 +4508,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) { masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); // Call the entry hook function. 
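// A brief C++ sketch of what this stub arranges: when the embedder installs
// a function entry hook, every generated prologue calls back with the
// function's address and the location of its return address. The typedef is
// intended to mirror V8's public FunctionEntryHook; treat the exact
// signature, like the surrounding harness, as an assumption.
#include <cstdint>

typedef void (*StandInEntryHook)(uintptr_t function,
                                 uintptr_t return_addr_location);

struct StandInIsolateHooks { StandInEntryHook entry_hook; };

static void MaybeCallEntryHook(StandInIsolateHooks* iso, uintptr_t function,
                               uintptr_t return_addr_location) {
  // The stub call is only emitted when a hook is set, and the stub itself
  // saves and restores caller-saved registers around the callback.
  if (iso->entry_hook != nullptr) {
    iso->entry_hook(function, return_addr_location);
  }
}
// The stub code below loads that hook's raw address and calls it.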
- __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()), + __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()), Assembler::RelocInfoNone()); AllowExternalCallThatCantCauseGC scope(masm); @@ -4738,7 +4530,7 @@ template<class T> static void CreateArrayDispatch(MacroAssembler* masm, AllocationSiteOverrideMode mode) { if (mode == DISABLE_ALLOCATION_SITES) { - T stub(GetInitialFastElementsKind(), mode); + T stub(masm->isolate(), GetInitialFastElementsKind(), mode); __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { int last_index = GetSequenceIndexFromFastElementsKind( @@ -4748,7 +4540,7 @@ static void CreateArrayDispatch(MacroAssembler* masm, ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); __ cmpl(rdx, Immediate(kind)); __ j(not_equal, &next); - T stub(kind); + T stub(masm->isolate(), kind); __ TailCallStub(&stub); __ bind(&next); } @@ -4797,12 +4589,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, ElementsKind initial = GetInitialFastElementsKind(); ElementsKind holey_initial = GetHoleyElementsKind(initial); - ArraySingleArgumentConstructorStub stub_holey(holey_initial, + ArraySingleArgumentConstructorStub stub_holey(masm->isolate(), + holey_initial, DISABLE_ALLOCATION_SITES); __ TailCallStub(&stub_holey); __ bind(&normal_sequence); - ArraySingleArgumentConstructorStub stub(initial, + ArraySingleArgumentConstructorStub stub(masm->isolate(), + initial, DISABLE_ALLOCATION_SITES); __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { @@ -4832,7 +4626,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); __ cmpl(rdx, Immediate(kind)); __ j(not_equal, &next); - ArraySingleArgumentConstructorStub stub(kind); + ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); __ TailCallStub(&stub); __ bind(&next); } @@ -4851,11 +4645,11 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { TERMINAL_FAST_ELEMENTS_KIND); for (int i = 0; i <= to_index; ++i) { ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); - T stub(kind); - stub.GetCode(isolate); + T stub(isolate, kind); + stub.GetCode(); if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { - T stub1(kind, DISABLE_ALLOCATION_SITES); - stub1.GetCode(isolate); + T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); + stub1.GetCode(); } } } @@ -4876,12 +4670,12 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; for (int i = 0; i < 2; i++) { // For internal arrays we only need a few things - InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); - stubh1.GetCode(isolate); - InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); - stubh2.GetCode(isolate); - InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); - stubh3.GetCode(isolate); + InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); + stubh1.GetCode(); + InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); + stubh2.GetCode(); + InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]); + stubh3.GetCode(); } } @@ -4964,7 +4758,7 @@ void InternalArrayConstructorStub::GenerateCase( __ testp(rax, rax); __ j(not_zero, ¬_zero_case); - InternalArrayNoArgumentConstructorStub stub0(kind); + InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); __ TailCallStub(&stub0); __ bind(¬_zero_case); @@ -4980,16 +4774,16 @@ void InternalArrayConstructorStub::GenerateCase( __ j(zero, 
&normal_sequence); InternalArraySingleArgumentConstructorStub - stub1_holey(GetHoleyElementsKind(kind)); + stub1_holey(isolate(), GetHoleyElementsKind(kind)); __ TailCallStub(&stub1_holey); } __ bind(&normal_sequence); - InternalArraySingleArgumentConstructorStub stub1(kind); + InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); __ TailCallStub(&stub1); __ bind(¬_one_case); - InternalArrayNArgumentsConstructorStub stubN(kind); + InternalArrayNArgumentsConstructorStub stubN(isolate(), kind); __ TailCallStub(&stubN); } @@ -5024,7 +4818,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset)); // Retrieve elements_kind from bit field 2. __ andp(rcx, Immediate(Map::kElementsKindMask)); - __ shr(rcx, Immediate(Map::kElementsKindShift)); + __ shrp(rcx, Immediate(Map::kElementsKindShift)); if (FLAG_debug_code) { Label done; @@ -5105,7 +4899,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { __ Push(scratch); // isolate __ Move(scratch, - ExternalReference::isolate_address(masm->isolate())); + ExternalReference::isolate_address(isolate())); __ Push(scratch); // holder __ Push(holder); @@ -5143,7 +4937,8 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { // v8::InvocationCallback's argument. __ leap(arguments_arg, StackSpaceOperand(0)); - Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); + ExternalReference thunk_ref = + ExternalReference::invoke_function_callback(isolate()); // Accessor for FunctionCallbackInfo and first js arg. StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1, @@ -5155,7 +4950,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) { is_store ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset); __ CallApiFunctionAndReturn( api_function_address, - thunk_address, + thunk_ref, callback_arg, argc + FCA::kArgsLength + 1, return_value_operand, @@ -5202,7 +4997,8 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) { // could be used to pass arguments. __ leap(accessor_info_arg, StackSpaceOperand(0)); - Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); + ExternalReference thunk_ref = + ExternalReference::invoke_accessor_getter_callback(isolate()); // It's okay if api_function_address == getter_arg // but not accessor_info_arg or name_arg @@ -5215,7 +5011,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) { PropertyCallbackArguments::kArgsLength - 1 - PropertyCallbackArguments::kReturnValueOffset); __ CallApiFunctionAndReturn(api_function_address, - thunk_address, + thunk_ref, getter_arg, kStackSpace, return_value_operand, diff --git a/deps/v8/src/x64/code-stubs-x64.h b/deps/v8/src/x64/code-stubs-x64.h index 8c8ab691a..2d6d21d0a 100644 --- a/deps/v8/src/x64/code-stubs-x64.h +++ b/deps/v8/src/x64/code-stubs-x64.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_X64_CODE_STUBS_X64_H_ #define V8_X64_CODE_STUBS_X64_H_ @@ -38,8 +15,8 @@ void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code); class StoreBufferOverflowStub: public PlatformCodeStub { public: - explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp) - : save_doubles_(save_fp) { } + StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp) + : PlatformCodeStub(isolate), save_doubles_(save_fp) { } void Generate(MacroAssembler* masm); @@ -86,7 +63,7 @@ class StringHelper : public AllStatic { class SubStringStub: public PlatformCodeStub { public: - SubStringStub() {} + explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {} private: Major MajorKey() { return SubString; } @@ -98,7 +75,7 @@ class SubStringStub: public PlatformCodeStub { class StringCompareStub: public PlatformCodeStub { public: - StringCompareStub() {} + explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) {} // Compares two flat ASCII strings and returns result in rax. static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm, @@ -137,11 +114,16 @@ class NameDictionaryLookupStub: public PlatformCodeStub { public: enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP }; - NameDictionaryLookupStub(Register dictionary, + NameDictionaryLookupStub(Isolate* isolate, + Register dictionary, Register result, Register index, LookupMode mode) - : dictionary_(dictionary), result_(result), index_(index), mode_(mode) { } + : PlatformCodeStub(isolate), + dictionary_(dictionary), + result_(result), + index_(index), + mode_(mode) { } void Generate(MacroAssembler* masm); @@ -197,12 +179,14 @@ class NameDictionaryLookupStub: public PlatformCodeStub { class RecordWriteStub: public PlatformCodeStub { public: - RecordWriteStub(Register object, + RecordWriteStub(Isolate* isolate, + Register object, Register value, Register address, RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) - : object_(object), + : PlatformCodeStub(isolate), + object_(object), value_(value), address_(address), remembered_set_action_(remembered_set_action), diff --git a/deps/v8/src/x64/codegen-x64.cc b/deps/v8/src/x64/codegen-x64.cc index 9b92dc867..990301770 100644 --- a/deps/v8/src/x64/codegen-x64.cc +++ b/deps/v8/src/x64/codegen-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -256,12 +233,20 @@ void ElementsTransitionGenerator::GenerateSmiToDouble( __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex); __ j(equal, &only_change_map); - // Check backing store for COW-ness. For COW arrays we have to - // allocate a new backing store. __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset)); - __ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset), - Heap::kFixedCOWArrayMapRootIndex); - __ j(equal, &new_backing_store); + if (kPointerSize == kDoubleSize) { + // Check backing store for COW-ness. For COW arrays we have to + // allocate a new backing store. + __ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset), + Heap::kFixedCOWArrayMapRootIndex); + __ j(equal, &new_backing_store); + } else { + // For x32 port we have to allocate a new backing store as SMI size is + // not equal with double size. + ASSERT(kDoubleSize == 2 * kPointerSize); + __ jmp(&new_backing_store); + } + // Check if the backing store is in new-space. If not, we need to allocate // a new one since the old one is in pointer-space. 
// If in new space, we can reuse the old backing store because it is @@ -608,10 +593,10 @@ void MathExpGenerator::EmitMathExp(MacroAssembler* masm, __ movsd(result, Operand(kScratchRegister, 6 * kDoubleSize)); __ leaq(temp1, Operand(temp2, 0x1ff800)); __ andq(temp2, Immediate(0x7ff)); - __ shr(temp1, Immediate(11)); + __ shrq(temp1, Immediate(11)); __ mulsd(double_scratch, Operand(kScratchRegister, 5 * kDoubleSize)); __ Move(kScratchRegister, ExternalReference::math_exp_log_table()); - __ shl(temp1, Immediate(52)); + __ shlq(temp1, Immediate(52)); __ orq(temp1, Operand(kScratchRegister, temp2, times_8, 0)); __ Move(kScratchRegister, ExternalReference::math_exp_constants(0)); __ subsd(double_scratch, input); @@ -631,37 +616,36 @@ void MathExpGenerator::EmitMathExp(MacroAssembler* masm, #undef __ -static byte* GetNoCodeAgeSequence(uint32_t* length) { - static bool initialized = false; - static byte sequence[kNoCodeAgeSequenceLength]; - *length = kNoCodeAgeSequenceLength; - if (!initialized) { - // The sequence of instructions that is patched out for aging code is the - // following boilerplate stack-building prologue that is found both in - // FUNCTION and OPTIMIZED_FUNCTION code: - CodePatcher patcher(sequence, kNoCodeAgeSequenceLength); - patcher.masm()->pushq(rbp); - patcher.masm()->movp(rbp, rsp); - patcher.masm()->Push(rsi); - patcher.masm()->Push(rdi); - initialized = true; - } - return sequence; +CodeAgingHelper::CodeAgingHelper() { + ASSERT(young_sequence_.length() == kNoCodeAgeSequenceLength); + // The sequence of instructions that is patched out for aging code is the + // following boilerplate stack-building prologue that is found both in + // FUNCTION and OPTIMIZED_FUNCTION code: + CodePatcher patcher(young_sequence_.start(), young_sequence_.length()); + patcher.masm()->pushq(rbp); + patcher.masm()->movp(rbp, rsp); + patcher.masm()->Push(rsi); + patcher.masm()->Push(rdi); } -bool Code::IsYoungSequence(byte* sequence) { - uint32_t young_length; - byte* young_sequence = GetNoCodeAgeSequence(&young_length); - bool result = (!memcmp(sequence, young_sequence, young_length)); - ASSERT(result || *sequence == kCallOpcode); +#ifdef DEBUG +bool CodeAgingHelper::IsOld(byte* candidate) const { + return *candidate == kCallOpcode; +} +#endif + + +bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) { + bool result = isolate->code_aging_helper()->IsYoung(sequence); + ASSERT(result || isolate->code_aging_helper()->IsOld(sequence)); return result; } -void Code::GetCodeAgeAndParity(byte* sequence, Age* age, +void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age, MarkingParity* parity) { - if (IsYoungSequence(sequence)) { + if (IsYoungSequence(isolate, sequence)) { *age = kNoAgeCodeAge; *parity = NO_MARKING_PARITY; } else { @@ -678,10 +662,9 @@ void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence, Code::Age age, MarkingParity parity) { - uint32_t young_length; - byte* young_sequence = GetNoCodeAgeSequence(&young_length); + uint32_t young_length = isolate->code_aging_helper()->young_sequence_length(); if (age == kNoAgeCodeAge) { - CopyBytes(sequence, young_sequence, young_length); + isolate->code_aging_helper()->CopyYoungSequenceTo(sequence); CPU::FlushICache(sequence, young_length); } else { Code* stub = GetCodeAgeStub(isolate, age, parity); diff --git a/deps/v8/src/x64/codegen-x64.h b/deps/v8/src/x64/codegen-x64.h index e637ff006..540bba77c 100644 --- a/deps/v8/src/x64/codegen-x64.h +++ b/deps/v8/src/x64/codegen-x64.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 
project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_X64_CODEGEN_X64_H_ #define V8_X64_CODEGEN_X64_H_ diff --git a/deps/v8/src/x64/cpu-x64.cc b/deps/v8/src/x64/cpu-x64.cc index 4fa290a8b..9243e2fb5 100644 --- a/deps/v8/src/x64/cpu-x64.cc +++ b/deps/v8/src/x64/cpu-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. // CPU specific code for x64 independent of OS goes here. @@ -41,16 +18,6 @@ namespace v8 { namespace internal { -void CPU::SetUp() { - CpuFeatures::Probe(); -} - - -bool CPU::SupportsCrankshaft() { - return true; // Yay! -} - - void CPU::FlushICache(void* start, size_t size) { // No need to flush the instruction cache on Intel. On Intel instruction // cache flushing is only necessary when multiple cores running the same diff --git a/deps/v8/src/x64/debug-x64.cc b/deps/v8/src/x64/debug-x64.cc index 36d5df678..6e0e05fda 100644 --- a/deps/v8/src/x64/debug-x64.cc +++ b/deps/v8/src/x64/debug-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -37,8 +14,6 @@ namespace v8 { namespace internal { -#ifdef ENABLE_DEBUGGER_SUPPORT - bool BreakLocationIterator::IsDebugBreakAtReturn() { return Debug::IsDebugBreakAtReturn(rinfo()); } @@ -50,7 +25,7 @@ bool BreakLocationIterator::IsDebugBreakAtReturn() { void BreakLocationIterator::SetDebugBreakAtReturn() { ASSERT(Assembler::kJSReturnSequenceLength >= Assembler::kCallSequenceLength); rinfo()->PatchCodeWithCall( - debug_info_->GetIsolate()->debug()->debug_break_return()->entry(), + debug_info_->GetIsolate()->builtins()->Return_DebugBreak()->entry(), Assembler::kJSReturnSequenceLength - Assembler::kCallSequenceLength); } @@ -80,7 +55,7 @@ bool BreakLocationIterator::IsDebugBreakAtSlot() { void BreakLocationIterator::SetDebugBreakAtSlot() { ASSERT(IsDebugBreakSlot()); rinfo()->PatchCodeWithCall( - debug_info_->GetIsolate()->debug()->debug_break_slot()->entry(), + debug_info_->GetIsolate()->builtins()->Slot_DebugBreak()->entry(), Assembler::kDebugBreakSlotLength - Assembler::kCallSequenceLength); } @@ -124,7 +99,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, __ Push(reg); } if ((non_object_regs & (1 << r)) != 0) { - __ PushInt64AsTwoSmis(reg); + __ PushRegisterAsTwoSmis(reg); } } @@ -134,7 +109,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, __ Set(rax, 0); // No arguments (argc == 0). __ Move(rbx, ExternalReference::debug_break(masm->isolate())); - CEntryStub ceb(1); + CEntryStub ceb(masm->isolate(), 1); __ CallStub(&ceb); // Restore the register values from the expression stack. @@ -149,7 +124,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, } // Reconstruct the 64-bit value from two smis. if ((non_object_regs & (1 << r)) != 0) { - __ PopInt64AsTwoSmis(reg); + __ PopRegisterAsTwoSmis(reg); } } @@ -177,6 +152,16 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, } +void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) { + // Register state for CallICStub + // ----------- S t a t e ------------- + // -- rdx : type feedback slot (smi) + // -- rdi : function + // ----------------------------------- + Generate_DebugBreakCallHelper(masm, rdx.bit() | rdi.bit(), 0, false); +} + + void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) { // Register state for IC load call (from ic-x64.cc). // ----------- S t a t e ------------- @@ -230,15 +215,6 @@ void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) { } -void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) { - // Register state for IC call call (from ic-x64.cc) - // ----------- S t a t e ------------- - // -- rcx: function name - // ----------------------------------- - Generate_DebugBreakCallHelper(masm, rcx.bit(), 0, false); -} - - void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) { // Register state just before return from JS function (from codegen-x64.cc). // ----------- S t a t e ------------- @@ -257,18 +233,6 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) { } -void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) { - // Register state for CallFunctionStub (from code-stubs-x64.cc). 
- // ----------- S t a t e ------------- - // -- rdi : function - // -- rbx: feedback array - // -- rdx: slot in feedback array - // ----------------------------------- - Generate_DebugBreakCallHelper(masm, rbx.bit() | rdx.bit() | rdi.bit(), - 0, false); -} - - void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) { // Register state for CallConstructStub (from code-stubs-x64.cc). // rax is the actual number of arguments not encoded as a smi, see comment @@ -348,8 +312,6 @@ const bool Debug::kFrameDropperSupported = true; #undef __ -#endif // ENABLE_DEBUGGER_SUPPORT - } } // namespace v8::internal #endif // V8_TARGET_ARCH_X64 diff --git a/deps/v8/src/x64/deoptimizer-x64.cc b/deps/v8/src/x64/deoptimizer-x64.cc index 4bc644def..9016d4b75 100644 --- a/deps/v8/src/x64/deoptimizer-x64.cc +++ b/deps/v8/src/x64/deoptimizer-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -230,7 +207,7 @@ void Deoptimizer::EntryGenerator::Generate() { // Fill in the input registers. for (int i = kNumberOfRegisters -1; i >= 0; i--) { int offset = (i * kPointerSize) + FrameDescription::registers_offset(); - __ Pop(Operand(rbx, offset)); + __ PopQuad(Operand(rbx, offset)); } // Fill in the double input registers. @@ -307,13 +284,13 @@ void Deoptimizer::EntryGenerator::Generate() { // Push state, pc, and continuation from the last output frame. __ Push(Operand(rbx, FrameDescription::state_offset())); - __ Push(Operand(rbx, FrameDescription::pc_offset())); - __ Push(Operand(rbx, FrameDescription::continuation_offset())); + __ PushQuad(Operand(rbx, FrameDescription::pc_offset())); + __ PushQuad(Operand(rbx, FrameDescription::continuation_offset())); // Push the registers from the last output frame. 
for (int i = 0; i < kNumberOfRegisters; i++) { int offset = (i * kPointerSize) + FrameDescription::registers_offset(); - __ Push(Operand(rbx, offset)); + __ PushQuad(Operand(rbx, offset)); } // Restore the registers from the stack. @@ -352,11 +329,19 @@ void Deoptimizer::TableEntryGenerator::GeneratePrologue() { void FrameDescription::SetCallerPc(unsigned offset, intptr_t value) { + if (kPCOnStackSize == 2 * kPointerSize) { + // Zero out the high-32 bit of PC for x32 port. + SetFrameSlot(offset + kPointerSize, 0); + } SetFrameSlot(offset, value); } void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { + if (kFPOnStackSize == 2 * kPointerSize) { + // Zero out the high-32 bit of FP for x32 port. + SetFrameSlot(offset + kPointerSize, 0); + } SetFrameSlot(offset, value); } diff --git a/deps/v8/src/x64/disasm-x64.cc b/deps/v8/src/x64/disasm-x64.cc index b870eae85..bef2f82df 100644 --- a/deps/v8/src/x64/disasm-x64.cc +++ b/deps/v8/src/x64/disasm-x64.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <assert.h> #include <stdio.h> diff --git a/deps/v8/src/x64/frames-x64.cc b/deps/v8/src/x64/frames-x64.cc index 3154d80a6..7121d68cd 100644 --- a/deps/v8/src/x64/frames-x64.cc +++ b/deps/v8/src/x64/frames-x64.cc @@ -1,29 +1,6 @@ // Copyright 2010 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/x64/frames-x64.h b/deps/v8/src/x64/frames-x64.h index 1fb77ffa6..43c11961c 100644 --- a/deps/v8/src/x64/frames-x64.h +++ b/deps/v8/src/x64/frames-x64.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_X64_FRAMES_X64_H_ #define V8_X64_FRAMES_X64_H_ @@ -56,11 +33,12 @@ class EntryFrameConstants : public AllStatic { static const int kXMMRegistersBlockSize = kXMMRegisterSize * kCalleeSaveXMMRegisters; static const int kCallerFPOffset = - -10 * kPointerSize - kXMMRegistersBlockSize; + -3 * kPointerSize + -7 * kRegisterSize - kXMMRegistersBlockSize; #else - static const int kCallerFPOffset = -8 * kPointerSize; + // We have 3 Push and 5 pushq in the JSEntryStub::GenerateBody. 
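
The reworked EntryFrameConstants above split the caller-FP offset into pointer-sized Push slots and register-sized pushq slots, which only diverge on the x32 port. A quick standalone check that, under the plain x64 assumption kPointerSize == kRegisterSize == 8, the new expressions still equal the old -8 * kPointerSize and -10 * kPointerSize values (the common -kXMMRegistersBlockSize term on Windows is left out):

    // Sanity check of the offset arithmetic only; the constants are the x64
    // values assumed above, not anything read from V8 headers.
    #include <cassert>

    int main() {
      const int kPointerSize = 8;
      const int kRegisterSize = 8;
      // Non-Windows entry frame: 3 Push + 5 pushq, previously -8 * kPointerSize.
      assert(-3 * kPointerSize + -5 * kRegisterSize == -8 * kPointerSize);   // -64
      // Windows entry frame: 3 Push + 7 pushq, previously -10 * kPointerSize.
      assert(-3 * kPointerSize + -7 * kRegisterSize == -10 * kPointerSize);  // -80
    }
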
+ static const int kCallerFPOffset = -3 * kPointerSize + -5 * kRegisterSize; #endif - static const int kArgvOffset = 6 * kPointerSize; + static const int kArgvOffset = 6 * kPointerSize; }; @@ -132,6 +110,10 @@ inline Object* JavaScriptFrame::function_slot_object() const { inline void StackHandler::SetFp(Address slot, Address fp) { + if (kFPOnStackSize == 2 * kPointerSize) { + // Zero out the high-32 bit of FP for x32 port. + Memory::Address_at(slot + kPointerSize) = 0; + } Memory::Address_at(slot) = fp; } diff --git a/deps/v8/src/x64/full-codegen-x64.cc b/deps/v8/src/x64/full-codegen-x64.cc index f0b943862..475080553 100644 --- a/deps/v8/src/x64/full-codegen-x64.cc +++ b/deps/v8/src/x64/full-codegen-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
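
StackHandler::SetFp above (like SetCallerPc and SetCallerFp in the deoptimizer earlier in this patch) zeroes the upper 32 bits of the slot before storing the frame pointer when kFPOnStackSize is twice kPointerSize, i.e. on the x32 port, so that an 8-byte read of the slot yields a clean value. A small host-side sketch of that convention, assuming a little-endian 8-byte slot and a 4-byte pointer; all names and values here are illustrative:

    // Illustrative model of writing a 4-byte FP into an 8-byte, previously
    // dirty frame slot: zero the high half first, then store the pointer.
    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      unsigned char slot[8];
      std::memset(slot, 0xff, sizeof(slot));    // stale data in the slot
      uint32_t fp = 0x12345678;                 // 4-byte frame pointer on x32
      std::memset(slot + 4, 0, 4);              // "zero out the high-32 bit of FP"
      std::memcpy(slot, &fp, sizeof(fp));       // store the pointer in the low half
      uint64_t as_quad;
      std::memcpy(&as_quad, slot, sizeof(as_quad));
      assert(as_quad == 0x12345678u);           // the full 8-byte slot reads cleanly
    }
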
#include "v8.h" @@ -74,7 +51,7 @@ class JumpPatchSite BASE_EMBEDDED { void EmitPatchInfo() { if (patch_site_.is_bound()) { int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); - ASSERT(is_int8(delta_to_patch_site)); + ASSERT(is_uint8(delta_to_patch_site)); __ testl(rax, Immediate(delta_to_patch_site)); #ifdef DEBUG info_emitted_ = true; @@ -107,11 +84,15 @@ static void EmitStackCheck(MacroAssembler* masm_, Isolate* isolate = masm_->isolate(); Label ok; ASSERT(scratch.is(rsp) == (pointers == 0)); + Heap::RootListIndex index; if (pointers != 0) { - __ movq(scratch, rsp); - __ subq(scratch, Immediate(pointers * kPointerSize)); + __ movp(scratch, rsp); + __ subp(scratch, Immediate(pointers * kPointerSize)); + index = Heap::kRealStackLimitRootIndex; + } else { + index = Heap::kStackLimitRootIndex; } - __ CompareRoot(scratch, Heap::kStackLimitRootIndex); + __ CompareRoot(scratch, index); __ j(above_equal, &ok, Label::kNear); __ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET); __ bind(&ok); @@ -136,8 +117,6 @@ void FullCodeGenerator::Generate() { handler_table_ = isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); - InitializeFeedbackVector(); - profiling_counter_ = isolate()->factory()->NewCell( Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); SetFunctionPosition(function()); @@ -195,7 +174,7 @@ void FullCodeGenerator::Generate() { const int kMaxPushes = 32; if (locals_count >= kMaxPushes) { int loop_iterations = locals_count / kMaxPushes; - __ movq(rcx, Immediate(loop_iterations)); + __ movp(rcx, Immediate(loop_iterations)); Label loop_header; __ bind(&loop_header); // Do pushes. @@ -203,7 +182,7 @@ void FullCodeGenerator::Generate() { __ Push(rdx); } // Continue loop if not done. - __ decq(rcx); + __ decp(rcx); __ j(not_zero, &loop_header, Label::kNear); } int remaining = locals_count % kMaxPushes; @@ -226,7 +205,7 @@ void FullCodeGenerator::Generate() { __ Push(info->scope()->GetScopeInfo()); __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2); } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); + FastNewContextStub stub(isolate(), heap_slots); __ CallStub(&stub); } else { __ Push(rdi); @@ -287,7 +266,7 @@ void FullCodeGenerator::Generate() { } else { type = ArgumentsAccessStub::NEW_SLOPPY_FAST; } - ArgumentsAccessStub stub(type); + ArgumentsAccessStub stub(isolate(), type); __ CallStub(&stub); SetVar(arguments, rax, rbx, rdx); @@ -359,6 +338,9 @@ void FullCodeGenerator::EmitProfilingCounterReset() { } +static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14; + + void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, Label* back_edge_target) { Comment cmnt(masm_, "[ Back edge bookkeeping"); @@ -369,17 +351,22 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); EmitProfilingCounterDecrement(weight); - __ j(positive, &ok, Label::kNear); - __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); - // Record a mapping of this PC offset to the OSR id. This is used to find - // the AST id from the unoptimized code in order to use it as a key into - // the deoptimization input data found in the optimized code. 
- RecordBackEdge(stmt->OsrEntryId()); + __ j(positive, &ok, Label::kNear); + { + PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset); + DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_); + __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); - EmitProfilingCounterReset(); + // Record a mapping of this PC offset to the OSR id. This is used to find + // the AST id from the unoptimized code in order to use it as a key into + // the deoptimization input data found in the optimized code. + RecordBackEdge(stmt->OsrEntryId()); + EmitProfilingCounterReset(); + } __ bind(&ok); + PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); // Record a mapping of the OSR id to this PC. This is used if the OSR // entry becomes the target of a bailout. We don't expect it to be, but @@ -432,11 +419,11 @@ void FullCodeGenerator::EmitReturnSequence() { int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize; __ Ret(arguments_bytes, rcx); -#ifdef ENABLE_DEBUGGER_SUPPORT // Add padding that will be overwritten by a debugger breakpoint. We - // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k" - // (3 + 1 + 3). - const int kPadding = Assembler::kJSReturnSequenceLength - 7; + // have just generated at least 7 bytes: "movp rsp, rbp; pop rbp; ret k" + // (3 + 1 + 3) for x64 and at least 6 (2 + 1 + 3) bytes for x32. + const int kPadding = Assembler::kJSReturnSequenceLength - + kPointerSize == kInt64Size ? 7 : 6; for (int i = 0; i < kPadding; ++i) { masm_->int3(); } @@ -444,7 +431,7 @@ void FullCodeGenerator::EmitReturnSequence() { // for the debugger's requirements. ASSERT(Assembler::kJSReturnSequenceLength <= masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); -#endif + info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); } } @@ -1159,15 +1146,10 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { Label non_proxy; __ bind(&fixed_array); - Handle<Object> feedback = Handle<Object>( - Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), - isolate()); - StoreFeedbackVectorSlot(slot, feedback); - // No need for a write barrier, we are storing a Smi in the feedback vector. __ Move(rbx, FeedbackVector()); __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)), - Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)); + TypeFeedbackInfo::MegamorphicSentinel(isolate())); __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); @@ -1324,7 +1306,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, !pretenure && scope()->is_function_scope() && info->num_literals() == 0) { - FastNewClosureStub stub(info->strict_mode(), info->is_generator()); + FastNewClosureStub stub(isolate(), + info->strict_mode(), + info->is_generator()); __ Move(rbx, info); __ CallStub(&stub); } else { @@ -1643,8 +1627,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ? 
ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags; int properties_count = constant_properties->length() / 2; - if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() || - flags != ObjectLiteral::kFastElements || + if (expr->may_store_doubles() || expr->depth() > 1 || + Serializer::enabled(isolate()) || flags != ObjectLiteral::kFastElements || properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset)); @@ -1658,7 +1642,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { __ Move(rbx, Smi::FromInt(expr->literal_index())); __ Move(rcx, constant_properties); __ Move(rdx, Smi::FromInt(flags)); - FastCloneShallowObjectStub stub(properties_count); + FastCloneShallowObjectStub stub(isolate(), properties_count); __ CallStub(&stub); } @@ -1793,11 +1777,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ Move(rbx, Smi::FromInt(expr->literal_index())); __ Move(rcx, constant_elements); FastCloneShallowArrayStub stub( + isolate(), FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, allocation_site_mode, length); __ CallStub(&stub); - } else if (expr->depth() > 1 || Serializer::enabled() || + } else if (expr->depth() > 1 || Serializer::enabled(isolate()) || length > FastCloneShallowArrayStub::kMaximumClonedLength) { __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); @@ -1821,7 +1806,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset)); __ Move(rbx, Smi::FromInt(expr->literal_index())); __ Move(rcx, constant_elements); - FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); + FastCloneShallowArrayStub stub(isolate(), + mode, + allocation_site_mode, length); __ CallStub(&stub); } @@ -1858,7 +1845,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { } else { // Store the subexpression value in the array's elements. __ Move(rcx, Smi::FromInt(i)); - StoreArrayLiteralElementStub stub; + StoreArrayLiteralElementStub stub(isolate()); __ CallStub(&stub); } @@ -1875,7 +1862,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { void FullCodeGenerator::VisitAssignment(Assignment* expr) { - ASSERT(expr->target()->IsValidLeftHandSide()); + ASSERT(expr->target()->IsValidReferenceExpression()); Comment cmnt(masm_, "[ Assignment"); @@ -2108,7 +2095,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) { CallIC(ic, TypeFeedbackId::None()); __ movp(rdi, rax); __ movp(Operand(rsp, 2 * kPointerSize), rdi); - CallFunctionStub stub(1, CALL_AS_METHOD); + CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); __ CallStub(&stub); __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); @@ -2164,9 +2151,8 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator, // Push holes for arguments to generator function. 
__ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); - __ movsxlq(rdx, - FieldOperand(rdx, - SharedFunctionInfo::kFormalParameterCountOffset)); + __ LoadSharedFunctionInfoSpecialField(rdx, rdx, + SharedFunctionInfo::kFormalParameterCountOffset); __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex); Label push_argument_holes, push_frame; __ bind(&push_argument_holes); @@ -2252,7 +2238,7 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) { Label gc_required; Label allocated; - Handle<Map> map(isolate()->native_context()->generator_result_map()); + Handle<Map> map(isolate()->native_context()->iterator_result_map()); __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT); __ jmp(&allocated); @@ -2317,8 +2303,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, __ bind(&stub_call); __ movp(rax, rcx); - BinaryOpICStub stub(op, mode); - CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); + BinaryOpICStub stub(isolate(), op, mode); + CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); patch_site.EmitPatchInfo(); __ jmp(&done, Label::kNear); @@ -2365,16 +2351,16 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op, OverwriteMode mode) { __ Pop(rdx); - BinaryOpICStub stub(op, mode); + BinaryOpICStub stub(isolate(), op, mode); JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. - CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); + CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); patch_site.EmitPatchInfo(); context()->Plug(rax); } void FullCodeGenerator::EmitAssignment(Expression* expr) { - ASSERT(expr->IsValidLeftHandSide()); + ASSERT(expr->IsValidReferenceExpression()); // Left-hand side can only be a property, a global or a (parameter or local) // slot. @@ -2568,14 +2554,14 @@ void FullCodeGenerator::CallIC(Handle<Code> code, // Code common for calls using the IC. -void FullCodeGenerator::EmitCallWithIC(Call* expr) { +void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { Expression* callee = expr->expression(); - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); - CallFunctionFlags flags; - // Get the target function; - if (callee->IsVariableProxy()) { + CallIC::CallType call_type = callee->IsVariableProxy() + ? CallIC::FUNCTION + : CallIC::METHOD; + // Get the target function. + if (call_type == CallIC::FUNCTION) { { StackValueContext context(this); EmitVariableLoad(callee->AsVariableProxy()); PrepareForBailout(callee, NO_REGISTERS); @@ -2583,7 +2569,6 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) { // Push undefined as receiver. This is patched in the method prologue if it // is a sloppy mode method. __ Push(isolate()->factory()->undefined_value()); - flags = NO_CALL_FUNCTION_FLAGS; } else { // Load the function from the receiver. ASSERT(callee->IsProperty()); @@ -2593,40 +2578,19 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) { // Push the target function under the receiver. __ Push(Operand(rsp, 0)); __ movp(Operand(rsp, kPointerSize), rax); - flags = CALL_AS_METHOD; - } - - // Load the arguments. - { PreservePositionScope scope(masm()->positions_recorder()); - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } } - // Record source position for debugger. 
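
One mechanical pattern repeats through the rest of this file (BinaryOpICStub, CallFunctionStub, the clone and construct stubs, and so on): stubs are now constructed with their Isolate, and GetCode() no longer takes one. The classes below are hypothetical mocks, not V8's CodeStub hierarchy; they only illustrate the before/after call shape:

    // Hypothetical stand-ins for the real classes, showing the signature change.
    #include <cstdio>

    struct Isolate {};

    class CodeStub {
     public:
      explicit CodeStub(Isolate* isolate) : isolate_(isolate) {}
      Isolate* isolate() const { return isolate_; }
      // Previously GetCode(Isolate*); the stub now already carries its isolate.
      void GetCode() const {
        std::printf("generating code, isolate=%p\n", static_cast<void*>(isolate_));
      }
     private:
      Isolate* isolate_;
    };

    class BinaryOpICStub : public CodeStub {
     public:
      BinaryOpICStub(Isolate* isolate, char op) : CodeStub(isolate), op_(op) {}
      char op() const { return op_; }
     private:
      char op_;
    };

    int main() {
      Isolate isolate;
      BinaryOpICStub stub(&isolate, '+');   // was: BinaryOpICStub stub(op, mode)
      std::printf("stub for '%c'\n", stub.op());
      stub.GetCode();                       // was: stub.GetCode(isolate())
    }
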
- SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, flags); - __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); - - RecordJSReturnSite(expr); - - // Restore context register. - __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, rax); + EmitCall(expr, call_type); } // Common code for calls using the IC. -void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, - Expression* key) { +void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, + Expression* key) { // Load the key. VisitForAccumulatorValue(key); Expression* callee = expr->expression(); - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); // Load the function from the receiver. ASSERT(callee->IsProperty()); @@ -2638,29 +2602,12 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, __ Push(Operand(rsp, 0)); __ movp(Operand(rsp, kPointerSize), rax); - // Load the arguments. - { PreservePositionScope scope(masm()->positions_recorder()); - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } - } - - // Record source position for debugger. - SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, CALL_AS_METHOD); - __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); - - RecordJSReturnSite(expr); - // Restore context register. - __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, rax); + EmitCall(expr, CallIC::METHOD); } -void FullCodeGenerator::EmitCallWithStub(Call* expr) { - // Code common for calls using the call stub. +void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) { + // Load the arguments. ZoneList<Expression*>* args = expr->arguments(); int arg_count = args->length(); { PreservePositionScope scope(masm()->positions_recorder()); @@ -2668,20 +2615,19 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { VisitForStackValue(args->at(i)); } } - // Record source position for debugger. - SetSourcePosition(expr->position()); - Handle<Object> uninitialized = - TypeFeedbackInfo::UninitializedSentinel(isolate()); - StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); - __ Move(rbx, FeedbackVector()); + // Record source position of the IC call. + SetSourcePosition(expr->position()); + Handle<Code> ic = CallIC::initialize_stub( + isolate(), arg_count, call_type); __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot())); - - // Record call targets in unoptimized code. - CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub); + // Don't assign a type feedback id to the IC, since type feedback is provided + // by the vector above. + CallIC(ic); + RecordJSReturnSite(expr); + // Restore context register. __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); // Discard the function left on TOS. @@ -2750,7 +2696,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { } // Record source position for debugger. 
SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); + CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); __ CallStub(&stub); RecordJSReturnSite(expr); @@ -2758,7 +2704,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); context()->DropAndPlug(1, rax); } else if (call_type == Call::GLOBAL_CALL) { - EmitCallWithIC(expr); + EmitCallWithLoadIC(expr); } else if (call_type == Call::LOOKUP_SLOT_CALL) { // Call to a lookup slot (dynamically introduced variable). @@ -2795,16 +2741,16 @@ void FullCodeGenerator::VisitCall(Call* expr) { // The receiver is either the global receiver or an object found by // LoadContextSlot. - EmitCallWithStub(expr); + EmitCall(expr); } else if (call_type == Call::PROPERTY_CALL) { Property* property = callee->AsProperty(); { PreservePositionScope scope(masm()->positions_recorder()); VisitForStackValue(property->obj()); } if (property->key()->IsPropertyName()) { - EmitCallWithIC(expr); + EmitCallWithLoadIC(expr); } else { - EmitKeyedCallWithIC(expr, property->key()); + EmitKeyedCallWithLoadIC(expr, property->key()); } } else { ASSERT(call_type == Call::OTHER_CALL); @@ -2814,7 +2760,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { } __ PushRoot(Heap::kUndefinedValueRootIndex); // Emit function call. - EmitCallWithStub(expr); + EmitCall(expr); } #ifdef DEBUG @@ -2851,12 +2797,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { __ movp(rdi, Operand(rsp, arg_count * kPointerSize)); // Record call targets in unoptimized code, but not in the snapshot. - Handle<Object> uninitialized = - TypeFeedbackInfo::UninitializedSentinel(isolate()); - StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); if (FLAG_pretenuring_call_new) { - StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), - isolate()->factory()->NewAllocationSite()); + EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); ASSERT(expr->AllocationSiteFeedbackSlot() == expr->CallNewFeedbackSlot() + 1); } @@ -2864,8 +2806,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { __ Move(rbx, FeedbackVector()); __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot())); - CallConstructStub stub(RECORD_CALL_TARGET); - __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); + CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); + __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); PrepareForBailoutForId(expr->ReturnId(), TOS_REG); context()->Plug(rax); } @@ -3236,7 +3178,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) { VisitForAccumulatorValue(args->at(0)); __ movp(rdx, rax); __ Move(rax, Smi::FromInt(info_->scope()->num_parameters())); - ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); + ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); __ CallStub(&stub); context()->Plug(rax); } @@ -3326,30 +3268,9 @@ void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { } -void FullCodeGenerator::EmitLog(CallRuntime* expr) { - // Conditionally generate a log call. - // Args: - // 0 (literal string): The type of logging (corresponds to the flags). - // This is used to determine whether or not to generate the log call. - // 1 (string): Format string. Access the string at argument index 2 - // with '%2s' (see Logger::LogRuntime for all the formats). - // 2 (array): Arguments to the format string. 
- ZoneList<Expression*>* args = expr->arguments(); - ASSERT_EQ(args->length(), 3); - if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) { - VisitForStackValue(args->at(1)); - VisitForStackValue(args->at(2)); - __ CallRuntime(Runtime::kHiddenLog, 2); - } - // Finally, we're expected to leave a value on the top of the stack. - __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); - context()->Plug(rax); -} - - void FullCodeGenerator::EmitSubString(CallRuntime* expr) { // Load the arguments on the stack and call the stub. - SubStringStub stub; + SubStringStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 3); VisitForStackValue(args->at(0)); @@ -3362,7 +3283,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) { void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { // Load the arguments on the stack and call the stub. - RegExpExecStub stub; + RegExpExecStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 4); VisitForStackValue(args->at(0)); @@ -3512,7 +3433,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { ASSERT(args->length() == 2); VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - MathPowStub stub(MathPowStub::ON_STACK); + MathPowStub stub(isolate(), MathPowStub::ON_STACK); __ CallStub(&stub); context()->Plug(rax); } @@ -3553,7 +3474,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { // Load the argument into rax and call the stub. VisitForAccumulatorValue(args->at(0)); - NumberToStringStub stub; + NumberToStringStub stub(isolate()); __ CallStub(&stub); context()->Plug(rax); } @@ -3679,7 +3600,7 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { VisitForAccumulatorValue(args->at(1)); __ Pop(rdx); - StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED); + StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); __ CallStub(&stub); context()->Plug(rax); } @@ -3692,32 +3613,12 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - StringCompareStub stub; + StringCompareStub stub(isolate()); __ CallStub(&stub); context()->Plug(rax); } -void FullCodeGenerator::EmitMathLog(CallRuntime* expr) { - // Load the argument on the stack and call the runtime function. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT(args->length() == 1); - VisitForStackValue(args->at(0)); - __ CallRuntime(Runtime::kMath_log, 1); - context()->Plug(rax); -} - - -void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) { - // Load the argument on the stack and call the runtime function. - ZoneList<Expression*>* args = expr->arguments(); - ASSERT(args->length() == 1); - VisitForStackValue(args->at(0)); - __ CallRuntime(Runtime::kMath_sqrt, 1); - context()->Plug(rax); -} - - void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() >= 2); @@ -3751,7 +3652,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { - RegExpConstructResultStub stub; + RegExpConstructResultStub stub(isolate()); ZoneList<Expression*>* args = expr->arguments(); ASSERT(args->length() == 3); VisitForStackValue(args->at(0)); @@ -4177,7 +4078,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { // Record source position of the IC call. 
SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); + CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); __ CallStub(&stub); @@ -4313,7 +4214,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { - ASSERT(expr->expression()->IsValidLeftHandSide()); + ASSERT(expr->expression()->IsValidReferenceExpression()); Comment cmnt(masm_, "[ CountOperation"); SetSourcePosition(expr->position()); @@ -4400,7 +4301,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { __ bind(&slow); } - ToNumberStub convert_stub; + ToNumberStub convert_stub(isolate()); __ CallStub(&convert_stub); // Save result for postfix expressions. @@ -4430,8 +4331,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { __ bind(&stub_call); __ movp(rdx, rax); __ Move(rax, Smi::FromInt(1)); - BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE); - CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); + BinaryOpICStub stub(isolate(), expr->binary_op(), NO_OVERWRITE); + CallIC(stub.GetCode(), expr->CountBinOpFeedbackId()); patch_site.EmitPatchInfo(); __ bind(&done); @@ -4546,12 +4447,13 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, } PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); - if (check->Equals(isolate()->heap()->number_string())) { + Factory* factory = isolate()->factory(); + if (String::Equals(check, factory->number_string())) { __ JumpIfSmi(rax, if_true); __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset)); __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex); Split(equal, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->string_string())) { + } else if (String::Equals(check, factory->string_string())) { __ JumpIfSmi(rax, if_false); // Check for undetectable objects => false. 
__ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx); @@ -4559,20 +4461,20 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ testb(FieldOperand(rdx, Map::kBitFieldOffset), Immediate(1 << Map::kIsUndetectable)); Split(zero, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->symbol_string())) { + } else if (String::Equals(check, factory->symbol_string())) { __ JumpIfSmi(rax, if_false); __ CmpObjectType(rax, SYMBOL_TYPE, rdx); Split(equal, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->boolean_string())) { + } else if (String::Equals(check, factory->boolean_string())) { __ CompareRoot(rax, Heap::kTrueValueRootIndex); __ j(equal, if_true); __ CompareRoot(rax, Heap::kFalseValueRootIndex); Split(equal, if_true, if_false, fall_through); } else if (FLAG_harmony_typeof && - check->Equals(isolate()->heap()->null_string())) { + String::Equals(check, factory->null_string())) { __ CompareRoot(rax, Heap::kNullValueRootIndex); Split(equal, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->undefined_string())) { + } else if (String::Equals(check, factory->undefined_string())) { __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); __ j(equal, if_true); __ JumpIfSmi(rax, if_false); @@ -4581,14 +4483,14 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, __ testb(FieldOperand(rdx, Map::kBitFieldOffset), Immediate(1 << Map::kIsUndetectable)); Split(not_zero, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->function_string())) { + } else if (String::Equals(check, factory->function_string())) { __ JumpIfSmi(rax, if_false); STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx); __ j(equal, if_true); __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE); Split(equal, if_true, if_false, fall_through); - } else if (check->Equals(isolate()->heap()->object_string())) { + } else if (String::Equals(check, factory->object_string())) { __ JumpIfSmi(rax, if_false); if (!FLAG_harmony_typeof) { __ CompareRoot(rax, Heap::kNullValueRootIndex); @@ -4639,7 +4541,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { case Token::INSTANCEOF: { VisitForStackValue(expr->right()); - InstanceofStub stub(InstanceofStub::kNoFlags); + InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); __ CallStub(&stub); PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); __ testp(rax, rax); @@ -4859,7 +4761,6 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( static const byte kJnsInstruction = 0x79; -static const byte kJnsOffset = 0x1d; static const byte kNopByteOne = 0x66; static const byte kNopByteTwo = 0x90; #ifdef DEBUG diff --git a/deps/v8/src/x64/ic-x64.cc b/deps/v8/src/x64/ic-x64.cc index ea118d076..90a303dba 100644 --- a/deps/v8/src/x64/ic-x64.cc +++ b/deps/v8/src/x64/ic-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -421,9 +398,9 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { // based on 32 bits of the map pointer and the string hash. __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); __ movl(rcx, rbx); - __ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift)); + __ shrl(rcx, Immediate(KeyedLookupCache::kMapHashShift)); __ movl(rdi, FieldOperand(rax, String::kHashFieldOffset)); - __ shr(rdi, Immediate(String::kHashShift)); + __ shrl(rdi, Immediate(String::kHashShift)); __ xorp(rcx, rdi); int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask); __ andp(rcx, Immediate(mask)); @@ -439,7 +416,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { for (int i = 0; i < kEntriesPerBucket - 1; i++) { Label try_next_entry; __ movp(rdi, rcx); - __ shl(rdi, Immediate(kPointerSizeLog2 + 1)); + __ shlp(rdi, Immediate(kPointerSizeLog2 + 1)); __ LoadAddress(kScratchRegister, cache_keys); int off = kPointerSize * i * 2; __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off)); @@ -1303,7 +1280,7 @@ void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) { Address delta_address = test_instruction_address + 1; // The delta to the start of the map check instruction and the // condition code uses at the patched jump. - int8_t delta = *reinterpret_cast<int8_t*>(delta_address); + uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address); if (FLAG_trace_ic) { PrintF("[ patching ic at %p, test=%p, delta=%d\n", address, test_instruction_address, delta); diff --git a/deps/v8/src/x64/lithium-codegen-x64.cc b/deps/v8/src/x64/lithium-codegen-x64.cc index 894a4dd3a..c3dc8ac30 100644 --- a/deps/v8/src/x64/lithium-codegen-x64.cc +++ b/deps/v8/src/x64/lithium-codegen-x64.cc @@ -1,29 +1,6 @@ // Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -89,13 +66,6 @@ void LCodeGen::FinishCode(Handle<Code> code) { code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); PopulateDeoptimizationData(code); - info()->CommitDependencies(code); -} - - -void LChunkBuilder::Abort(BailoutReason reason) { - info()->set_bailout_reason(reason); - status_ = ABORTED; } @@ -219,7 +189,7 @@ bool LCodeGen::GeneratePrologue() { Comment(";;; Allocate local context"); // Argument to NewContext is the function, which is still in rdi. if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); + FastNewContextStub stub(isolate(), heap_slots); __ CallStub(&stub); } else { __ Push(rdi); @@ -284,6 +254,12 @@ void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) { + if (FLAG_debug_code && FLAG_enable_slow_asserts && instr->HasResult() && + instr->hydrogen_value()->representation().IsInteger32() && + instr->result()->IsRegister()) { + __ AssertZeroExtended(ToRegister(instr->result())); + } + if (instr->HasResult() && instr->MustSignExtendResult(chunk())) { if (instr->result()->IsRegister()) { Register result_reg = ToRegister(instr->result()); @@ -687,6 +663,7 @@ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, Safepoint::DeoptMode mode) { + environment->set_has_been_used(); if (!environment->HasBeenRegistered()) { // Physical stack frame layout: // -x ............. -4 0 ..................................... 
y @@ -806,7 +783,7 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { int length = deoptimizations_.length(); if (length == 0) return; Handle<DeoptimizationInputData> data = - factory()->NewDeoptimizationInputData(length, TENURED); + DeoptimizationInputData::New(isolate(), length, TENURED); Handle<ByteArray> translations = translations_.CreateByteArray(isolate()->factory()); @@ -982,18 +959,18 @@ void LCodeGen::DoCallStub(LCallStub* instr) { ASSERT(ToRegister(instr->result()).is(rax)); switch (instr->hydrogen()->major_key()) { case CodeStub::RegExpExec: { - RegExpExecStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + RegExpExecStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } case CodeStub::SubString: { - SubStringStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + SubStringStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } case CodeStub::StringCompare: { - StringCompareStub stub; - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + StringCompareStub stub(isolate()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); break; } default: @@ -1138,22 +1115,28 @@ void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { } // If the divisor is negative, we have to negate and handle edge cases. - Label not_kmin_int, done; __ negl(dividend); if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { DeoptimizeIf(zero, instr->environment()); } - if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { - // Note that we could emit branch-free code, but that would need one more - // register. - __ j(no_overflow, &not_kmin_int, Label::kNear); - if (divisor == -1) { - DeoptimizeIf(no_condition, instr->environment()); - } else { - __ movl(dividend, Immediate(kMinInt / divisor)); - __ jmp(&done, Label::kNear); - } + + // If the negation could not overflow, simply shifting is OK. + if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { + __ sarl(dividend, Immediate(shift)); + return; } + + // Note that we could emit branch-free code, but that would need one more + // register. + if (divisor == -1) { + DeoptimizeIf(overflow, instr->environment()); + return; + } + + Label not_kmin_int, done; + __ j(no_overflow, &not_kmin_int, Label::kNear); + __ movl(dividend, Immediate(kMinInt / divisor)); + __ jmp(&done, Label::kNear); __ bind(&not_kmin_int); __ sarl(dividend, Immediate(shift)); __ bind(&done); @@ -1205,11 +1188,64 @@ void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { } +// TODO(svenpanne) Refactor this to avoid code duplication with DoDivI. +void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) { + HBinaryOperation* hdiv = instr->hydrogen(); + Register dividend = ToRegister(instr->dividend()); + Register divisor = ToRegister(instr->divisor()); + Register remainder = ToRegister(instr->temp()); + Register result = ToRegister(instr->result()); + ASSERT(dividend.is(rax)); + ASSERT(remainder.is(rdx)); + ASSERT(result.is(rax)); + ASSERT(!divisor.is(rax)); + ASSERT(!divisor.is(rdx)); + + // Check for x / 0. + if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { + __ testl(divisor, divisor); + DeoptimizeIf(zero, instr->environment()); + } + + // Check for (0 / -x) that will produce negative zero.
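
For the power-of-two case just above (DoFlooringDivByPowerOf2I with a negative divisor), the emitted code negates the dividend, handles the kMinInt overflow cases separately, and then arithmetic-shifts, since floor(x / -2^k) == floor(-x / 2^k). A host-side model of that path; it assumes arithmetic right shift of negative values (true on the targets involved and guaranteed from C++20 on), and the function name is illustrative:

    // Model of the negate-then-shift path only; the generated code deopts or
    // special-cases dividend == kMinInt before reaching this point.
    #include <cassert>
    #include <cstdint>

    int32_t FlooringDivByNegPowerOf2(int32_t dividend, int shift) {
      // divisor == -(1 << shift), dividend != INT32_MIN (handled earlier).
      assert(dividend != INT32_MIN);
      int32_t negated = -dividend;
      return negated >> shift;   // arithmetic shift == floor division by 2^shift
    }

    int main() {
      assert(FlooringDivByNegPowerOf2(5, 1) == -3);    // floor(5 / -2)
      assert(FlooringDivByNegPowerOf2(-5, 1) == 2);    // floor(-5 / -2)
      assert(FlooringDivByNegPowerOf2(7, 2) == -2);    // floor(7 / -4)
    }
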
+ if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { + Label dividend_not_zero; + __ testl(dividend, dividend); + __ j(not_zero, &dividend_not_zero, Label::kNear); + __ testl(divisor, divisor); + DeoptimizeIf(sign, instr->environment()); + __ bind(&dividend_not_zero); + } + + // Check for (kMinInt / -1). + if (hdiv->CheckFlag(HValue::kCanOverflow)) { + Label dividend_not_min_int; + __ cmpl(dividend, Immediate(kMinInt)); + __ j(not_zero, &dividend_not_min_int, Label::kNear); + __ cmpl(divisor, Immediate(-1)); + DeoptimizeIf(zero, instr->environment()); + __ bind(&dividend_not_min_int); + } + + // Sign extend to rdx (= remainder). + __ cdq(); + __ idivl(divisor); + + Label done; + __ testl(remainder, remainder); + __ j(zero, &done, Label::kNear); + __ xorl(remainder, divisor); + __ sarl(remainder, Immediate(31)); + __ addl(result, remainder); + __ bind(&done); +} + + void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { Register dividend = ToRegister(instr->dividend()); int32_t divisor = instr->divisor(); Register result = ToRegister(instr->result()); - ASSERT(divisor == kMinInt || (divisor != 0 && IsPowerOf2(Abs(divisor)))); + ASSERT(divisor == kMinInt || IsPowerOf2(Abs(divisor))); ASSERT(!result.is(dividend)); // Check for (0 / -x) that will produce negative zero. @@ -1261,7 +1297,7 @@ void LCodeGen::DoDivByConstI(LDivByConstI* instr) { } __ TruncatingDiv(dividend, Abs(divisor)); - if (divisor < 0) __ negp(rdx); + if (divisor < 0) __ negl(rdx); if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { __ movl(rax, rdx); @@ -1272,15 +1308,15 @@ } +// TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. void LCodeGen::DoDivI(LDivI* instr) { HBinaryOperation* hdiv = instr->hydrogen(); - Register dividend = ToRegister(instr->left()); - Register divisor = ToRegister(instr->right()); + Register dividend = ToRegister(instr->dividend()); + Register divisor = ToRegister(instr->divisor()); Register remainder = ToRegister(instr->temp()); - Register result = ToRegister(instr->result()); ASSERT(dividend.is(rax)); ASSERT(remainder.is(rdx)); - ASSERT(result.is(rax)); + ASSERT(ToRegister(instr->result()).is(rax)); ASSERT(!divisor.is(rax)); ASSERT(!divisor.is(rdx)); @@ -1314,15 +1350,7 @@ void LCodeGen::DoDivI(LDivI* instr) { __ cdq(); __ idivl(divisor); - if (hdiv->IsMathFloorOfDiv()) { - Label done; - __ testl(remainder, remainder); - __ j(zero, &done, Label::kNear); - __ xorl(remainder, divisor); - __ sarl(remainder, Immediate(31)); - __ addl(result, remainder); - __ bind(&done); - } else if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { + if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { // Deoptimize if remainder is not 0.
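
The tail of the new DoFlooringDivI above is the classic fix-up from truncating to flooring division: idivl truncates toward zero, and when the remainder is non-zero and its sign differs from the divisor's, the floored quotient is one less. The xorl/sarl/addl sequence adds that -1 without a second branch. A small host-side model, assuming arithmetic right shift of negative values; the function name is illustrative:

    // Mirrors the remainder fix-up only; divisor == 0 and kMinInt / -1 are
    // deoptimized before the division in the generated code.
    #include <cassert>
    #include <cstdint>

    int32_t FlooringDiv(int32_t dividend, int32_t divisor) {
      int32_t result = dividend / divisor;      // truncating, like idivl
      int32_t remainder = dividend % divisor;
      if (remainder != 0) {                     // testl remainder; jz done
        int32_t mixed = remainder ^ divisor;    // sign bit set iff signs differ
        result += mixed >> 31;                  // adds -1 in that case, else 0
      }
      return result;
    }

    int main() {
      assert(FlooringDiv(7, 2) == 3);
      assert(FlooringDiv(-7, 2) == -4);   // truncation gives -3, floor is -4
      assert(FlooringDiv(7, -2) == -4);
      assert(FlooringDiv(-7, -2) == 3);
    }
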
__ testl(remainder, remainder); DeoptimizeIf(not_zero, instr->environment()); @@ -1473,13 +1501,25 @@ void LCodeGen::DoBitI(LBitI* instr) { } else if (right->IsStackSlot()) { switch (instr->op()) { case Token::BIT_AND: - __ andp(ToRegister(left), ToOperand(right)); + if (instr->IsInteger32()) { + __ andl(ToRegister(left), ToOperand(right)); + } else { + __ andp(ToRegister(left), ToOperand(right)); + } break; case Token::BIT_OR: - __ orp(ToRegister(left), ToOperand(right)); + if (instr->IsInteger32()) { + __ orl(ToRegister(left), ToOperand(right)); + } else { + __ orp(ToRegister(left), ToOperand(right)); + } break; case Token::BIT_XOR: - __ xorp(ToRegister(left), ToOperand(right)); + if (instr->IsInteger32()) { + __ xorl(ToRegister(left), ToOperand(right)); + } else { + __ xorp(ToRegister(left), ToOperand(right)); + } break; default: UNREACHABLE(); @@ -1489,13 +1529,25 @@ void LCodeGen::DoBitI(LBitI* instr) { ASSERT(right->IsRegister()); switch (instr->op()) { case Token::BIT_AND: - __ andp(ToRegister(left), ToRegister(right)); + if (instr->IsInteger32()) { + __ andl(ToRegister(left), ToRegister(right)); + } else { + __ andp(ToRegister(left), ToRegister(right)); + } break; case Token::BIT_OR: - __ orp(ToRegister(left), ToRegister(right)); + if (instr->IsInteger32()) { + __ orl(ToRegister(left), ToRegister(right)); + } else { + __ orp(ToRegister(left), ToRegister(right)); + } break; case Token::BIT_XOR: - __ xorp(ToRegister(left), ToRegister(right)); + if (instr->IsInteger32()) { + __ xorl(ToRegister(left), ToRegister(right)); + } else { + __ xorp(ToRegister(left), ToRegister(right)); + } break; default: UNREACHABLE(); @@ -1559,7 +1611,7 @@ void LCodeGen::DoShiftI(LShiftI* instr) { case Token::SHL: if (shift_count != 0) { if (instr->hydrogen_value()->representation().IsSmi()) { - __ shl(ToRegister(left), Immediate(shift_count)); + __ shlp(ToRegister(left), Immediate(shift_count)); } else { __ shll(ToRegister(left), Immediate(shift_count)); } @@ -1602,7 +1654,12 @@ void LCodeGen::DoSubI(LSubI* instr) { void LCodeGen::DoConstantI(LConstantI* instr) { - __ Set(ToRegister(instr->result()), instr->value()); + Register dst = ToRegister(instr->result()); + if (instr->value() == 0) { + __ xorl(dst, dst); + } else { + __ movl(dst, Immediate(instr->value())); + } } @@ -1634,8 +1691,16 @@ void LCodeGen::DoConstantE(LConstantE* instr) { void LCodeGen::DoConstantT(LConstantT* instr) { - Handle<Object> value = instr->value(isolate()); - __ Move(ToRegister(instr->result()), value); + Handle<Object> object = instr->value(isolate()); + AllowDeferredHandleDereference smi_check; + if (instr->hydrogen()->HasObjectMap()) { + Handle<Map> object_map = instr->hydrogen()->ObjectMap().handle(); + ASSERT(object->IsHeapObject()); + ASSERT(!object_map->is_stable() || + *object_map == Handle<HeapObject>::cast(object)->map()); + USE(object_map); + } + __ Move(ToRegister(instr->result()), object); } @@ -1775,6 +1840,7 @@ void LCodeGen::DoAddI(LAddI* instr) { if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) { if (right->IsConstantOperand()) { + ASSERT(!target_rep.IsSmi()); // No support for smi-immediates. int32_t offset = ToInteger32(LConstantOperand::cast(right)); if (is_p) { __ leap(ToRegister(instr->result()), @@ -1793,6 +1859,7 @@ void LCodeGen::DoAddI(LAddI* instr) { } } else { if (right->IsConstantOperand()) { + ASSERT(!target_rep.IsSmi()); // No support for smi-immediates. 
if (is_p) { __ addp(ToRegister(left), Immediate(ToInteger32(LConstantOperand::cast(right)))); @@ -1940,8 +2007,8 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) { ASSERT(ToRegister(instr->right()).is(rax)); ASSERT(ToRegister(instr->result()).is(rax)); - BinaryOpICStub stub(instr->op(), NO_OVERWRITE); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -2534,10 +2601,10 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { void LCodeGen::DoInstanceOf(LInstanceOf* instr) { ASSERT(ToRegister(instr->context()).is(rsi)); - InstanceofStub stub(InstanceofStub::kNoFlags); + InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); __ Push(ToRegister(instr->left())); __ Push(ToRegister(instr->right())); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); Label true_value, done; __ testp(rax, rax); __ j(zero, &true_value, Label::kNear); @@ -2620,7 +2687,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, PushSafepointRegistersScope scope(this); InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>( InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck); - InstanceofStub stub(flags); + InstanceofStub stub(isolate(), flags); __ Push(ToRegister(instr->value())); __ Push(instr->function()); @@ -2635,7 +2702,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, // safepoint with two arguments because stub is going to // remove the third argument from the stack before jumping // to instanceof builtin on the slow path. - CallCodeGeneric(stub.GetCode(isolate()), + CallCodeGeneric(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RECORD_SAFEPOINT_WITH_REGISTERS, @@ -2706,7 +2773,7 @@ void LCodeGen::DoReturn(LReturn* instr) { __ SmiToInteger32(reg, reg); Register return_addr_reg = reg.is(rcx) ? rbx : rcx; __ PopReturnAddressTo(return_addr_reg); - __ shl(reg, Immediate(kPointerSizeLog2)); + __ shlp(reg, Immediate(kPointerSizeLog2)); __ addp(rsp, reg); __ jmp(return_addr_reg); } @@ -2848,17 +2915,17 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { } Representation representation = access.representation(); - if (representation.IsSmi() && + if (representation.IsSmi() && SmiValuesAre32Bits() && instr->hydrogen()->representation().IsInteger32()) { -#ifdef DEBUG - Register scratch = kScratchRegister; - __ Load(scratch, FieldOperand(object, offset), representation); - __ AssertSmi(scratch); -#endif + if (FLAG_debug_code) { + Register scratch = kScratchRegister; + __ Load(scratch, FieldOperand(object, offset), representation); + __ AssertSmi(scratch); + } // Read int value directly from upper half of the smi. 
STATIC_ASSERT(kSmiTag == 0); - STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); + ASSERT(kSmiTagSize + kSmiShiftSize == 32); offset += kPointerSize / 2; representation = Representation::Integer32(); } @@ -2981,25 +3048,25 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { switch (elements_kind) { case EXTERNAL_INT8_ELEMENTS: case INT8_ELEMENTS: - __ movsxbq(result, operand); + __ movsxbl(result, operand); break; case EXTERNAL_UINT8_ELEMENTS: case EXTERNAL_UINT8_CLAMPED_ELEMENTS: case UINT8_ELEMENTS: case UINT8_CLAMPED_ELEMENTS: - __ movzxbp(result, operand); + __ movzxbl(result, operand); break; case EXTERNAL_INT16_ELEMENTS: case INT16_ELEMENTS: - __ movsxwq(result, operand); + __ movsxwl(result, operand); break; case EXTERNAL_UINT16_ELEMENTS: case UINT16_ELEMENTS: - __ movzxwp(result, operand); + __ movzxwl(result, operand); break; case EXTERNAL_INT32_ELEMENTS: case INT32_ELEMENTS: - __ movsxlq(result, operand); + __ movl(result, operand); break; case EXTERNAL_UINT32_ELEMENTS: case UINT32_ELEMENTS: @@ -3062,23 +3129,23 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { int offset = FixedArray::kHeaderSize - kHeapObjectTag; Representation representation = hinstr->representation(); - if (representation.IsInteger32() && + if (representation.IsInteger32() && SmiValuesAre32Bits() && hinstr->elements_kind() == FAST_SMI_ELEMENTS) { ASSERT(!requires_hole_check); -#ifdef DEBUG - Register scratch = kScratchRegister; - __ Load(scratch, - BuildFastArrayOperand(instr->elements(), - key, - FAST_ELEMENTS, - offset, - instr->additional_index()), - Representation::Smi()); - __ AssertSmi(scratch); -#endif + if (FLAG_debug_code) { + Register scratch = kScratchRegister; + __ Load(scratch, + BuildFastArrayOperand(instr->elements(), + key, + FAST_ELEMENTS, + offset, + instr->additional_index()), + Representation::Smi()); + __ AssertSmi(scratch); + } // Read int value directly from upper half of the smi. 
STATIC_ASSERT(kSmiTag == 0); - STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); + ASSERT(kSmiTagSize + kSmiShiftSize == 32); offset += kPointerSize / 2; } @@ -3470,8 +3537,8 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { __ bind(&allocated); __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); - __ shl(tmp2, Immediate(1)); - __ shr(tmp2, Immediate(1)); + __ shlq(tmp2, Immediate(1)); + __ shrq(tmp2, Immediate(1)); __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); __ StoreToSafepointRegisterSlot(input_reg, tmp); @@ -3573,7 +3640,7 @@ void LCodeGen::DoMathFloor(LMathFloor* instr) { __ testq(output_reg, Immediate(1)); DeoptimizeIf(not_zero, instr->environment()); __ Set(output_reg, 0); - __ jmp(&done, Label::kNear); + __ jmp(&done); __ bind(&positive_sign); } @@ -3713,7 +3780,7 @@ void LCodeGen::DoPower(LPower* instr) { ASSERT(ToDoubleRegister(instr->result()).is(xmm3)); if (exponent_type.IsSmi()) { - MathPowStub stub(MathPowStub::TAGGED); + MathPowStub stub(isolate(), MathPowStub::TAGGED); __ CallStub(&stub); } else if (exponent_type.IsTagged()) { Label no_deopt; @@ -3721,14 +3788,14 @@ void LCodeGen::DoPower(LPower* instr) { __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx); DeoptimizeIf(not_equal, instr->environment()); __ bind(&no_deopt); - MathPowStub stub(MathPowStub::TAGGED); + MathPowStub stub(isolate(), MathPowStub::TAGGED); __ CallStub(&stub); } else if (exponent_type.IsInteger32()) { - MathPowStub stub(MathPowStub::INTEGER); + MathPowStub stub(isolate(), MathPowStub::INTEGER); __ CallStub(&stub); } else { ASSERT(exponent_type.IsDouble()); - MathPowStub stub(MathPowStub::DOUBLE); + MathPowStub stub(isolate(), MathPowStub::DOUBLE); __ CallStub(&stub); } } @@ -3819,8 +3886,8 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) { ASSERT(ToRegister(instr->result()).is(rax)); int arity = instr->arity(); - CallFunctionStub stub(arity, instr->hydrogen()->function_flags()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -3832,8 +3899,8 @@ void LCodeGen::DoCallNew(LCallNew* instr) { __ Set(rax, instr->arity()); // No cell in ebx for construct type feedback in optimized code __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); - CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } @@ -3851,8 +3918,8 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { : DONT_OVERRIDE; if (instr->arity() == 0) { - ArrayNoArgumentConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } else if (instr->arity() == 1) { Label done; if (IsFastPackedElementsKind(kind)) { @@ -3864,18 +3931,20 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { __ j(zero, &packed_case, Label::kNear); ElementsKind holey_kind = GetHoleyElementsKind(kind); - ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArraySingleArgumentConstructorStub stub(isolate(), + holey_kind, + override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); __ 
jmp(&done, Label::kNear); __ bind(&packed_case); } - ArraySingleArgumentConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); __ bind(&done); } else { - ArrayNArgumentsConstructorStub stub(kind, override_mode); - CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); + CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); } } @@ -3929,7 +3998,6 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { } Register object = ToRegister(instr->object()); - Handle<Map> transition = instr->transition(); SmiCheck check_needed = hinstr->value()->IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; @@ -3948,20 +4016,22 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { Condition cc = masm()->CheckSmi(value); DeoptimizeIf(cc, instr->environment()); - // We know that value is a smi now, so we can omit the check below. + // We know now that value is not a smi, so we can omit the check below. check_needed = OMIT_SMI_CHECK; } } } else if (representation.IsDouble()) { - ASSERT(transition.is_null()); ASSERT(access.IsInobject()); + ASSERT(!hinstr->has_transition()); ASSERT(!hinstr->NeedsWriteBarrier()); XMMRegister value = ToDoubleRegister(instr->value()); __ movsd(FieldOperand(object, offset), value); return; } - if (!transition.is_null()) { + if (hinstr->has_transition()) { + Handle<Map> transition = hinstr->transition_map(); + AddDeprecationDependency(transition); if (!hinstr->NeedsWriteBarrierForMap()) { __ Move(FieldOperand(object, HeapObject::kMapOffset), transition); } else { @@ -3986,17 +4056,17 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { __ movp(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); } - if (representation.IsSmi() && + if (representation.IsSmi() && SmiValuesAre32Bits() && hinstr->value()->representation().IsInteger32()) { ASSERT(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); -#ifdef DEBUG - Register scratch = kScratchRegister; - __ Load(scratch, FieldOperand(write_register, offset), representation); - __ AssertSmi(scratch); -#endif + if (FLAG_debug_code) { + Register scratch = kScratchRegister; + __ Load(scratch, FieldOperand(write_register, offset), representation); + __ AssertSmi(scratch); + } // Store int value directly to upper half of the smi. 
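The reworked DoBoundsCheck a little further below deoptimizes on a single unsigned below/below_equal comparison between index and length. A sketch of why one unsigned compare is enough even for negative indices, assuming lengths are non-negative; hypothetical helper, not the generated code:

    #include <cassert>
    #include <cstdint>

    // A negative index reinterpreted as unsigned becomes a huge value, so it
    // fails the same unsigned "index < length" test that catches index >= length.
    bool InBounds(int32_t index, int32_t length) {
      return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
    }

    int main() {
      assert(InBounds(3, 10));
      assert(!InBounds(10, 10));
      assert(!InBounds(-1, 10));   // would otherwise need a second, signed compare
      return 0;
    }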
STATIC_ASSERT(kSmiTag == 0); - STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); + ASSERT(kSmiTagSize + kSmiShiftSize == 32); offset += kPointerSize / 2; representation = Representation::Integer32(); } @@ -4051,65 +4121,64 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { } -void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) { - if (FLAG_debug_code && check->hydrogen()->skip_check()) { - Label done; - __ j(NegateCondition(cc), &done, Label::kNear); - __ int3(); - __ bind(&done); - } else { - DeoptimizeIf(cc, check->environment()); - } -} - - void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { - HBoundsCheck* hinstr = instr->hydrogen(); - if (hinstr->skip_check()) return; - - Representation representation = hinstr->length()->representation(); - ASSERT(representation.Equals(hinstr->index()->representation())); + Representation representation = instr->hydrogen()->length()->representation(); + ASSERT(representation.Equals(instr->hydrogen()->index()->representation())); ASSERT(representation.IsSmiOrInteger32()); - if (instr->length()->IsRegister()) { - Register reg = ToRegister(instr->length()); - - if (instr->index()->IsConstantOperand()) { - int32_t constant_index = - ToInteger32(LConstantOperand::cast(instr->index())); + Condition cc = instr->hydrogen()->allow_equality() ? below : below_equal; + if (instr->length()->IsConstantOperand()) { + int32_t length = ToInteger32(LConstantOperand::cast(instr->length())); + Register index = ToRegister(instr->index()); + if (representation.IsSmi()) { + __ Cmp(index, Smi::FromInt(length)); + } else { + __ cmpl(index, Immediate(length)); + } + cc = ReverseCondition(cc); + } else if (instr->index()->IsConstantOperand()) { + int32_t index = ToInteger32(LConstantOperand::cast(instr->index())); + if (instr->length()->IsRegister()) { + Register length = ToRegister(instr->length()); if (representation.IsSmi()) { - __ Cmp(reg, Smi::FromInt(constant_index)); + __ Cmp(length, Smi::FromInt(index)); } else { - __ cmpl(reg, Immediate(constant_index)); + __ cmpl(length, Immediate(index)); } } else { - Register reg2 = ToRegister(instr->index()); + Operand length = ToOperand(instr->length()); if (representation.IsSmi()) { - __ cmpp(reg, reg2); + __ Cmp(length, Smi::FromInt(index)); } else { - __ cmpl(reg, reg2); + __ cmpl(length, Immediate(index)); } } } else { - Operand length = ToOperand(instr->length()); - if (instr->index()->IsConstantOperand()) { - int32_t constant_index = - ToInteger32(LConstantOperand::cast(instr->index())); + Register index = ToRegister(instr->index()); + if (instr->length()->IsRegister()) { + Register length = ToRegister(instr->length()); if (representation.IsSmi()) { - __ Cmp(length, Smi::FromInt(constant_index)); + __ cmpp(length, index); } else { - __ cmpl(length, Immediate(constant_index)); + __ cmpl(length, index); } } else { + Operand length = ToOperand(instr->length()); if (representation.IsSmi()) { - __ cmpp(length, ToRegister(instr->index())); + __ cmpp(length, index); } else { - __ cmpl(length, ToRegister(instr->index())); + __ cmpl(length, index); } } } - Condition condition = hinstr->allow_equality() ? 
below : below_equal; - ApplyCheckIf(condition, instr); + if (FLAG_debug_code && instr->hydrogen()->skip_check()) { + Label done; + __ j(NegateCondition(cc), &done, Label::kNear); + __ int3(); + __ bind(&done); + } else { + DeoptimizeIf(cc, instr->environment()); + } } @@ -4209,23 +4278,23 @@ void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { int offset = FixedArray::kHeaderSize - kHeapObjectTag; Representation representation = hinstr->value()->representation(); - if (representation.IsInteger32()) { + if (representation.IsInteger32() && SmiValuesAre32Bits()) { ASSERT(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); ASSERT(hinstr->elements_kind() == FAST_SMI_ELEMENTS); -#ifdef DEBUG - Register scratch = kScratchRegister; - __ Load(scratch, - BuildFastArrayOperand(instr->elements(), - key, - FAST_ELEMENTS, - offset, - instr->additional_index()), - Representation::Smi()); - __ AssertSmi(scratch); -#endif + if (FLAG_debug_code) { + Register scratch = kScratchRegister; + __ Load(scratch, + BuildFastArrayOperand(instr->elements(), + key, + FAST_ELEMENTS, + offset, + instr->additional_index()), + Representation::Smi()); + __ AssertSmi(scratch); + } // Store int value directly to upper half of the smi. STATIC_ASSERT(kSmiTag == 0); - STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); + ASSERT(kSmiTagSize + kSmiShiftSize == 32); offset += kPointerSize / 2; } @@ -4318,17 +4387,14 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, ToRegister(instr->temp()), kDontSaveFPRegs); } else { + ASSERT(object_reg.is(rax)); ASSERT(ToRegister(instr->context()).is(rsi)); PushSafepointRegistersScope scope(this); - if (!object_reg.is(rax)) { - __ movp(rax, object_reg); - } __ Move(rbx, to_map); bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; - TransitionElementsKindStub stub(from_kind, to_kind, is_js_array); + TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); __ CallStub(&stub); - RecordSafepointWithRegisters( - instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); + RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); } __ bind(¬_applicable); } @@ -4348,9 +4414,10 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) { ASSERT(ToRegister(instr->context()).is(rsi)); ASSERT(ToRegister(instr->left()).is(rdx)); ASSERT(ToRegister(instr->right()).is(rax)); - StringAddStub stub(instr->hydrogen()->flags(), + StringAddStub stub(isolate(), + instr->hydrogen()->flags(), instr->hydrogen()->pretenure_flag()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -4954,29 +5021,35 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) { Register object_; }; - if (instr->hydrogen()->CanOmitMapChecks()) return; + if (instr->hydrogen()->IsStabilityCheck()) { + const UniqueSet<Map>* maps = instr->hydrogen()->maps(); + for (int i = 0; i < maps->size(); ++i) { + AddStabilityDependency(maps->at(i).handle()); + } + return; + } LOperand* input = instr->value(); ASSERT(input->IsRegister()); Register reg = ToRegister(input); DeferredCheckMaps* deferred = NULL; - if (instr->hydrogen()->has_migration_target()) { + if (instr->hydrogen()->HasMigrationTarget()) { deferred = new(zone()) DeferredCheckMaps(this, instr, reg); __ bind(deferred->check_maps()); } - UniqueSet<Map> map_set = instr->hydrogen()->map_set(); + const UniqueSet<Map>* maps = instr->hydrogen()->maps(); Label success; - for (int i = 0; i < 
map_set.size() - 1; i++) { - Handle<Map> map = map_set.at(i).handle(); + for (int i = 0; i < maps->size() - 1; i++) { + Handle<Map> map = maps->at(i).handle(); __ CompareMap(reg, map); __ j(equal, &success, Label::kNear); } - Handle<Map> map = map_set.at(map_set.size() - 1).handle(); + Handle<Map> map = maps->at(maps->size() - 1).handle(); __ CompareMap(reg, map); - if (instr->hydrogen()->has_migration_target()) { + if (instr->hydrogen()->HasMigrationTarget()) { __ j(not_equal, deferred->entry()); } else { DeoptimizeIf(not_equal, instr->environment()); @@ -5042,7 +5115,7 @@ void LCodeGen::DoDoubleBits(LDoubleBits* instr) { Register result_reg = ToRegister(instr->result()); if (instr->hydrogen()->bits() == HDoubleBits::HIGH) { __ movq(result_reg, value_reg); - __ shr(result_reg, Immediate(32)); + __ shrq(result_reg, Immediate(32)); } else { __ movd(result_reg, value_reg); } @@ -5114,7 +5187,7 @@ void LCodeGen::DoAllocate(LAllocate* instr) { __ movl(temp, Immediate((size / kPointerSize) - 1)); } else { temp = ToRegister(instr->size()); - __ sar(temp, Immediate(kPointerSizeLog2)); + __ sarp(temp, Immediate(kPointerSizeLog2)); __ decl(temp); } Label loop; @@ -5229,10 +5302,11 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { // space for nested functions that don't need literals cloning. bool pretenure = instr->hydrogen()->pretenure(); if (!pretenure && instr->hydrogen()->has_no_literals()) { - FastNewClosureStub stub(instr->hydrogen()->strict_mode(), + FastNewClosureStub stub(isolate(), + instr->hydrogen()->strict_mode(), instr->hydrogen()->is_generator()); __ Move(rbx, instr->hydrogen()->shared_info()); - CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } else { __ Push(rsi); __ Push(instr->hydrogen()->shared_info()); @@ -5285,14 +5359,15 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { Label::Distance false_distance = right_block == next_block ? 
Label::kNear : Label::kFar; Condition final_branch_condition = no_condition; - if (type_name->Equals(heap()->number_string())) { + Factory* factory = isolate()->factory(); + if (String::Equals(type_name, factory->number_string())) { __ JumpIfSmi(input, true_label, true_distance); __ CompareRoot(FieldOperand(input, HeapObject::kMapOffset), Heap::kHeapNumberMapRootIndex); final_branch_condition = equal; - } else if (type_name->Equals(heap()->string_string())) { + } else if (String::Equals(type_name, factory->string_string())) { __ JumpIfSmi(input, false_label, false_distance); __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input); __ j(above_equal, false_label, false_distance); @@ -5300,22 +5375,23 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { Immediate(1 << Map::kIsUndetectable)); final_branch_condition = zero; - } else if (type_name->Equals(heap()->symbol_string())) { + } else if (String::Equals(type_name, factory->symbol_string())) { __ JumpIfSmi(input, false_label, false_distance); __ CmpObjectType(input, SYMBOL_TYPE, input); final_branch_condition = equal; - } else if (type_name->Equals(heap()->boolean_string())) { + } else if (String::Equals(type_name, factory->boolean_string())) { __ CompareRoot(input, Heap::kTrueValueRootIndex); __ j(equal, true_label, true_distance); __ CompareRoot(input, Heap::kFalseValueRootIndex); final_branch_condition = equal; - } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) { + } else if (FLAG_harmony_typeof && + String::Equals(type_name, factory->null_string())) { __ CompareRoot(input, Heap::kNullValueRootIndex); final_branch_condition = equal; - } else if (type_name->Equals(heap()->undefined_string())) { + } else if (String::Equals(type_name, factory->undefined_string())) { __ CompareRoot(input, Heap::kUndefinedValueRootIndex); __ j(equal, true_label, true_distance); __ JumpIfSmi(input, false_label, false_distance); @@ -5325,7 +5401,7 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { Immediate(1 << Map::kIsUndetectable)); final_branch_condition = not_zero; - } else if (type_name->Equals(heap()->function_string())) { + } else if (String::Equals(type_name, factory->function_string())) { STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); __ JumpIfSmi(input, false_label, false_distance); __ CmpObjectType(input, JS_FUNCTION_TYPE, input); @@ -5333,7 +5409,7 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE); final_branch_condition = equal; - } else if (type_name->Equals(heap()->object_string())) { + } else if (String::Equals(type_name, factory->object_string())) { __ JumpIfSmi(input, false_label, false_distance); if (!FLAG_harmony_typeof) { __ CompareRoot(input, Heap::kNullValueRootIndex); @@ -5567,11 +5643,55 @@ void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { } +void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, + Register object, + Register index) { + PushSafepointRegistersScope scope(this); + __ Push(object); + __ Push(index); + __ xorp(rsi, rsi); + __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); + RecordSafepointWithRegisters( + instr->pointer_map(), 2, Safepoint::kNoLazyDeopt); + __ StoreToSafepointRegisterSlot(object, rax); +} + + void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { + class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode { + public: + DeferredLoadMutableDouble(LCodeGen* codegen, + LLoadFieldByIndex* instr, + 
Register object, + Register index) + : LDeferredCode(codegen), + instr_(instr), + object_(object), + index_(index) { + } + virtual void Generate() V8_OVERRIDE { + codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_); + } + virtual LInstruction* instr() V8_OVERRIDE { return instr_; } + private: + LLoadFieldByIndex* instr_; + Register object_; + Register index_; + }; + Register object = ToRegister(instr->object()); Register index = ToRegister(instr->index()); + DeferredLoadMutableDouble* deferred; + deferred = new(zone()) DeferredLoadMutableDouble(this, instr, object, index); + Label out_of_object, done; + __ Move(kScratchRegister, Smi::FromInt(1)); + __ testp(index, kScratchRegister); + __ j(not_zero, deferred->entry()); + + __ sarp(index, Immediate(1)); + __ SmiToInteger32(index, index); __ cmpl(index, Immediate(0)); __ j(less, &out_of_object, Label::kNear); @@ -5589,6 +5709,7 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { index, times_pointer_size, FixedArray::kHeaderSize - kPointerSize)); + __ bind(deferred->exit()); __ bind(&done); } diff --git a/deps/v8/src/x64/lithium-codegen-x64.h b/deps/v8/src/x64/lithium-codegen-x64.h index 37807ede0..686dc857a 100644 --- a/deps/v8/src/x64/lithium-codegen-x64.h +++ b/deps/v8/src/x64/lithium-codegen-x64.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
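The DoLoadFieldByIndex code above tests the low payload bit of the index smi, takes the deferred Runtime::kLoadMutableDouble path when it is set, then shifts the bit away and branches on the sign to choose between in-object fields and the properties array. A sketch of that decoding on the untagged integer, as read from the adjacent code; the names are illustrative, not V8's:

    #include <cstdint>

    struct DecodedFieldIndex {
      bool is_mutable_double;
      bool is_out_of_object;
      int32_t field_index;
    };

    DecodedFieldIndex DecodeFieldIndex(int32_t encoded) {
      DecodedFieldIndex d;
      d.is_mutable_double = (encoded & 1) != 0;   // testp(index, Smi::FromInt(1))
      int32_t idx = encoded >> 1;                 // sarp(index, Immediate(1))
      d.is_out_of_object = idx < 0;               // cmpl(index, 0); j(less, &out_of_object)
      d.field_index = idx;
      return d;
    }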
#ifndef V8_X64_LITHIUM_CODEGEN_X64_H_ #define V8_X64_LITHIUM_CODEGEN_X64_H_ @@ -35,7 +12,7 @@ #include "lithium-codegen.h" #include "safepoint-table.h" #include "scopes.h" -#include "v8utils.h" +#include "utils.h" #include "x64/lithium-gap-resolver-x64.h" namespace v8 { @@ -116,6 +93,9 @@ class LCodeGen: public LCodeGenBase { void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, Label* map_check); void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); + void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, + Register object, + Register index); // Parallel move support. void DoParallelMove(LParallelMove* move); @@ -147,8 +127,6 @@ class LCodeGen: public LCodeGenBase { int GetStackSlotCount() const { return chunk()->spill_slot_count(); } - void Abort(BailoutReason reason); - void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } @@ -224,7 +202,6 @@ class LCodeGen: public LCodeGenBase { LEnvironment* environment, Deoptimizer::BailoutType bailout_type); void DeoptimizeIf(Condition cc, LEnvironment* environment); - void ApplyCheckIf(Condition cc, LBoundsCheck* check); bool DeoptEveryNTimes() { return FLAG_deopt_every_n_times != 0 && !info()->IsStub(); diff --git a/deps/v8/src/x64/lithium-gap-resolver-x64.cc b/deps/v8/src/x64/lithium-gap-resolver-x64.cc index 7c7fc29e0..7827abd16 100644 --- a/deps/v8/src/x64/lithium-gap-resolver-x64.cc +++ b/deps/v8/src/x64/lithium-gap-resolver-x64.cc @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" diff --git a/deps/v8/src/x64/lithium-gap-resolver-x64.h b/deps/v8/src/x64/lithium-gap-resolver-x64.h index f218455b6..5ceacb17d 100644 --- a/deps/v8/src/x64/lithium-gap-resolver-x64.h +++ b/deps/v8/src/x64/lithium-gap-resolver-x64.h @@ -1,29 +1,6 @@ // Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_X64_LITHIUM_GAP_RESOLVER_X64_H_ #define V8_X64_LITHIUM_GAP_RESOLVER_X64_H_ diff --git a/deps/v8/src/x64/lithium-x64.cc b/deps/v8/src/x64/lithium-x64.cc index 8c4f24e8f..eb9e7dd00 100644 --- a/deps/v8/src/x64/lithium-x64.cc +++ b/deps/v8/src/x64/lithium-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -179,12 +156,9 @@ template<int R> bool LTemplateResultInstruction<R>::MustSignExtendResult( LPlatformChunk* chunk) const { HValue* hvalue = this->hydrogen_value(); - - if (hvalue == NULL) return false; - if (!hvalue->representation().IsInteger32()) return false; - if (hvalue->HasRange() && !hvalue->range()->CanBeNegative()) return false; - - return chunk->GetDehoistedKeyIds()->Contains(hvalue->id()); + return hvalue != NULL && + hvalue->representation().IsInteger32() && + chunk->GetDehoistedKeyIds()->Contains(hvalue->id()); } @@ -465,7 +439,7 @@ LPlatformChunk* LChunkBuilder::Build() { } -void LCodeGen::Abort(BailoutReason reason) { +void LChunkBuilder::Abort(BailoutReason reason) { info()->set_bailout_reason(reason); status_ = ABORTED; } @@ -652,6 +626,8 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, !hinstr->HasObservableSideEffects(); if (needs_environment && !instr->HasEnvironment()) { instr = AssignEnvironment(instr); + // We can't really figure out if the environment is needed or not. + instr->environment()->set_has_been_used(); } return instr; @@ -895,7 +871,8 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) { // the it was just a plain use), so it is free to move the split child into // the same register that is used for the use-at-start. // See https://code.google.com/p/chromium/issues/detail?id=201590 - if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) { + if (!(instr->ClobbersRegisters() && + instr->ClobbersDoubleRegisters(isolate()))) { int fixed = 0; int used_at_start = 0; for (UseIterator it(instr); !it.Done(); it.Advance()) { @@ -1310,7 +1287,7 @@ LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) { } -LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) { +LInstruction* LChunkBuilder::DoDivI(HDiv* instr) { ASSERT(instr->representation().IsSmiOrInteger32()); ASSERT(instr->left()->representation().Equals(instr->representation())); ASSERT(instr->right()->representation().Equals(instr->representation())); @@ -1322,8 +1299,7 @@ LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) { if (instr->CheckFlag(HValue::kCanBeDivByZero) || instr->CheckFlag(HValue::kBailoutOnMinusZero) || instr->CheckFlag(HValue::kCanOverflow) || - (!instr->IsMathFloorOfDiv() && - !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) { + !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { result = AssignEnvironment(result); } return result; @@ -1387,13 +1363,31 @@ LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) { } +LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) { + ASSERT(instr->representation().IsSmiOrInteger32()); + ASSERT(instr->left()->representation().Equals(instr->representation())); + ASSERT(instr->right()->representation().Equals(instr->representation())); + LOperand* dividend = UseFixed(instr->left(), rax); + LOperand* divisor = UseRegister(instr->right()); + LOperand* temp = FixedTemp(rdx); + LInstruction* result = DefineFixed(new(zone()) LFlooringDivI( + dividend, divisor, temp), rax); + if (instr->CheckFlag(HValue::kCanBeDivByZero) || + instr->CheckFlag(HValue::kBailoutOnMinusZero) || + instr->CheckFlag(HValue::kCanOverflow)) { + result = AssignEnvironment(result); + } + return result; +} + + LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) { if (instr->RightIsPowerOf2()) { return DoFlooringDivByPowerOf2I(instr); } else if 
(instr->right()->IsConstant()) { return DoFlooringDivByConstI(instr); } else { - return DoDivI(instr); + return DoFlooringDivI(instr); } } @@ -1515,14 +1509,19 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) { ASSERT(instr->right()->representation().Equals(instr->representation())); LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand()); HValue* right_candidate = instr->BetterRightOperand(); - LOperand* right = use_lea - ? UseRegisterOrConstantAtStart(right_candidate) - : UseOrConstantAtStart(right_candidate); + LOperand* right; + if (instr->representation().IsSmi()) { + // We cannot add a tagged immediate to a tagged value, + // so we request it in a register. + right = UseRegisterAtStart(right_candidate); + } else { + right = use_lea ? UseRegisterOrConstantAtStart(right_candidate) + : UseOrConstantAtStart(right_candidate); + } LAddI* add = new(zone()) LAddI(left, right); bool can_overflow = instr->CheckFlag(HValue::kCanOverflow); - LInstruction* result = use_lea - ? DefineAsRegister(add) - : DefineSameAsFirst(add); + LInstruction* result = use_lea ? DefineAsRegister(add) + : DefineSameAsFirst(add); if (can_overflow) { result = AssignEnvironment(result); } @@ -1600,6 +1599,8 @@ LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) { LInstruction* LChunkBuilder::DoCompareNumericAndBranch( HCompareNumericAndBranch* instr) { + LInstruction* goto_instr = CheckElideControlInstruction(instr); + if (goto_instr != NULL) return goto_instr; Representation r = instr->representation(); if (r.IsSmiOrInteger32()) { ASSERT(instr->left()->representation().Equals(r)); @@ -1768,9 +1769,16 @@ LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) { LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) { - LOperand* value = UseRegisterOrConstantAtStart(instr->index()); - LOperand* length = Use(instr->length()); - return AssignEnvironment(new(zone()) LBoundsCheck(value, length)); + if (!FLAG_debug_code && instr->skip_check()) return NULL; + LOperand* index = UseRegisterOrConstantAtStart(instr->index()); + LOperand* length = !index->IsConstantOperand() + ? UseOrConstantAtStart(instr->length()) + : UseAtStart(instr->length()); + LInstruction* result = new(zone()) LBoundsCheck(index, length); + if (!FLAG_debug_code || !instr->skip_check()) { + result = AssignEnvironment(result); + } + return result; } @@ -1804,26 +1812,21 @@ LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) { LInstruction* LChunkBuilder::DoChange(HChange* instr) { Representation from = instr->from(); Representation to = instr->to(); + HValue* val = instr->value(); if (from.IsSmi()) { if (to.IsTagged()) { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); return DefineSameAsFirst(new(zone()) LDummyUse(value)); } from = Representation::Tagged(); } - // Only mark conversions that might need to allocate as calling rather than - // all changes. This makes simple, non-allocating conversion not have to force - // building a stack frame. 
if (from.IsTagged()) { if (to.IsDouble()) { - LOperand* value = UseRegister(instr->value()); - LInstruction* res = DefineAsRegister(new(zone()) LNumberUntagD(value)); - if (!instr->value()->representation().IsSmi()) { - res = AssignEnvironment(res); - } - return res; + LOperand* value = UseRegister(val); + LInstruction* result = DefineAsRegister(new(zone()) LNumberUntagD(value)); + if (!val->representation().IsSmi()) result = AssignEnvironment(result); + return result; } else if (to.IsSmi()) { - HValue* val = instr->value(); LOperand* value = UseRegister(val); if (val->type().IsSmi()) { return DefineSameAsFirst(new(zone()) LDummyUse(value)); @@ -1831,78 +1834,70 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value))); } else { ASSERT(to.IsInteger32()); - HValue* val = instr->value(); - LOperand* value = UseRegister(val); if (val->type().IsSmi() || val->representation().IsSmi()) { + LOperand* value = UseRegister(val); return DefineSameAsFirst(new(zone()) LSmiUntag(value, false)); } else { + LOperand* value = UseRegister(val); bool truncating = instr->CanTruncateToInt32(); LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1); - LInstruction* res = + LInstruction* result = DefineSameAsFirst(new(zone()) LTaggedToI(value, xmm_temp)); - if (!instr->value()->representation().IsSmi()) { - // Note: Only deopts in deferred code. - res = AssignEnvironment(res); - } - return res; + if (!val->representation().IsSmi()) result = AssignEnvironment(result); + return result; } } } else if (from.IsDouble()) { if (to.IsTagged()) { info()->MarkAsDeferredCalling(); - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); LOperand* temp = TempRegister(); - - // Make sure that temp and result_temp are different registers. 
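Several of the DoChange cases above reduce to a smi check (LCheckSmi, JumpIfSmi). With kSmiTag == 0 and a one-bit tag, that check is just a test of the low bit of the tagged word; a sketch over a raw word, not V8's real helpers:

    #include <cassert>
    #include <cstdint>

    inline bool IsSmiWord(intptr_t tagged) {
      return (tagged & 1) == 0;   // kSmiTagMask == 1, kSmiTag == 0
    }

    int main() {
      assert(IsSmiWord(static_cast<intptr_t>(4) << 1));  // tagged small integer
      assert(!IsSmiWord(0x100001));                      // odd word: heap-object pointer pattern
      return 0;
    }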
LUnallocated* result_temp = TempRegister(); LNumberTagD* result = new(zone()) LNumberTagD(value, temp); return AssignPointerMap(Define(result, result_temp)); } else if (to.IsSmi()) { - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); return AssignEnvironment( DefineAsRegister(new(zone()) LDoubleToSmi(value))); } else { ASSERT(to.IsInteger32()); - LOperand* value = UseRegister(instr->value()); + LOperand* value = UseRegister(val); LInstruction* result = DefineAsRegister(new(zone()) LDoubleToI(value)); - if (!instr->CanTruncateToInt32()) { - result = AssignEnvironment(result); - } + if (!instr->CanTruncateToInt32()) result = AssignEnvironment(result); return result; } } else if (from.IsInteger32()) { info()->MarkAsDeferredCalling(); if (to.IsTagged()) { - HValue* val = instr->value(); - LOperand* value = UseRegister(val); if (!instr->CheckFlag(HValue::kCanOverflow)) { + LOperand* value = UseRegister(val); return DefineAsRegister(new(zone()) LSmiTag(value)); } else if (val->CheckFlag(HInstruction::kUint32)) { + LOperand* value = UseRegister(val); LOperand* temp1 = TempRegister(); LOperand* temp2 = FixedTemp(xmm1); LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2); return AssignPointerMap(DefineSameAsFirst(result)); } else { + LOperand* value = UseRegister(val); LNumberTagI* result = new(zone()) LNumberTagI(value); return AssignPointerMap(DefineSameAsFirst(result)); } } else if (to.IsSmi()) { - HValue* val = instr->value(); LOperand* value = UseRegister(val); LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value)); if (instr->CheckFlag(HValue::kCanOverflow)) { - ASSERT(val->CheckFlag(HValue::kUint32)); result = AssignEnvironment(result); } return result; } else { - if (instr->value()->CheckFlag(HInstruction::kUint32)) { + ASSERT(to.IsDouble()); + if (val->CheckFlag(HInstruction::kUint32)) { LOperand* temp = FixedTemp(xmm1); return DefineAsRegister( - new(zone()) LUint32ToDouble(UseRegister(instr->value()), temp)); + new(zone()) LUint32ToDouble(UseRegister(val), temp)); } else { - ASSERT(to.IsDouble()); - LOperand* value = Use(instr->value()); + LOperand* value = Use(val); return DefineAsRegister(new(zone()) LInteger32ToDouble(value)); } } @@ -1914,7 +1909,9 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) { LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) { LOperand* value = UseRegisterAtStart(instr->value()); - return AssignEnvironment(new(zone()) LCheckNonSmi(value)); + LInstruction* result = new(zone()) LCheckNonSmi(value); + if (!instr->value()->IsHeapObject()) result = AssignEnvironment(result); + return result; } @@ -1938,16 +1935,12 @@ LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) { LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) { - LOperand* value = NULL; - if (!instr->CanOmitMapChecks()) { - value = UseRegisterAtStart(instr->value()); - if (instr->has_migration_target()) info()->MarkAsDeferredCalling(); - } - LCheckMaps* result = new(zone()) LCheckMaps(value); - if (!instr->CanOmitMapChecks()) { - // Note: Only deopts in deferred code. 
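The kUint32 branch above routes through LUint32ToDouble (and LNumberTagU) because the same 32 bits denote different numbers as int32 and uint32 once the top bit is set. A short illustration, assuming two's-complement int32:

    #include <cassert>
    #include <cstdint>

    int main() {
      uint32_t u = 0x80000000u;                       // 2147483648
      double via_signed   = static_cast<int32_t>(u);  // -2147483648.0: wrong if the value carries kUint32
      double via_unsigned = static_cast<double>(u);   //  2147483648.0: what the uint32 path must produce
      assert(via_signed != via_unsigned);
      return 0;
    }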
- AssignEnvironment(result); - if (instr->has_migration_target()) return AssignPointerMap(result); + if (instr->IsStabilityCheck()) return new(zone()) LCheckMaps; + LOperand* value = UseRegisterAtStart(instr->value()); + LInstruction* result = AssignEnvironment(new(zone()) LCheckMaps(value)); + if (instr->HasMigrationTarget()) { + info()->MarkAsDeferredCalling(); + result = AssignPointerMap(result); } return result; } @@ -2243,7 +2236,6 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) { LInstruction* LChunkBuilder::DoTransitionElementsKind( HTransitionElementsKind* instr) { - LOperand* object = UseRegister(instr->object()); if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) { LOperand* object = UseRegister(instr->object()); LOperand* new_map_reg = TempRegister(); @@ -2252,10 +2244,11 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind( object, NULL, new_map_reg, temp_reg); return result; } else { + LOperand* object = UseFixed(instr->object(), rax); LOperand* context = UseFixed(instr->context(), rsi); LTransitionElementsKind* result = new(zone()) LTransitionElementsKind(object, context, NULL, NULL); - return AssignPointerMap(result); + return MarkAsCall(result, instr); } } @@ -2411,7 +2404,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) { } else { ASSERT(info()->IsStub()); CodeStubInterfaceDescriptor* descriptor = - info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); + info()->code_stub()->GetInterfaceDescriptor(); int index = static_cast<int>(instr->index()); Register reg = descriptor->GetParameterRegister(index); return DefineFixed(result, reg); @@ -2528,6 +2521,7 @@ LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { HEnvironment* outer = current_block_->last_environment(); + outer->set_ast_id(instr->ReturnId()); HConstant* undefined = graph()->GetConstantUndefined(); HEnvironment* inner = outer->CopyForInlining(instr->closure(), instr->arguments_count(), @@ -2589,7 +2583,9 @@ LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) { LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { LOperand* object = UseRegister(instr->object()); LOperand* index = UseTempRegister(instr->index()); - return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); + LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index); + LInstruction* result = DefineSameAsFirst(load); + return AssignPointerMap(result); } diff --git a/deps/v8/src/x64/lithium-x64.h b/deps/v8/src/x64/lithium-x64.h index 9d9ac1ea1..093b95b4d 100644 --- a/deps/v8/src/x64/lithium-x64.h +++ b/deps/v8/src/x64/lithium-x64.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_X64_LITHIUM_X64_H_ #define V8_X64_LITHIUM_X64_H_ @@ -97,6 +74,7 @@ class LCodeGen; V(Dummy) \ V(FlooringDivByConstI) \ V(FlooringDivByPowerOf2I) \ + V(FlooringDivI) \ V(ForInCacheArray) \ V(ForInPrepareMap) \ V(FunctionLiteral) \ @@ -256,7 +234,9 @@ class LInstruction : public ZoneObject { // Interface to the register allocator and iterators. bool ClobbersTemps() const { return IsCall(); } bool ClobbersRegisters() const { return IsCall(); } - virtual bool ClobbersDoubleRegisters() const { return IsCall(); } + virtual bool ClobbersDoubleRegisters(Isolate* isolate) const { + return IsCall(); + } virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { } @@ -732,14 +712,14 @@ class LDivByConstI V8_FINAL : public LTemplateInstruction<1, 1, 2> { class LDivI V8_FINAL : public LTemplateInstruction<1, 2, 1> { public: - LDivI(LOperand* left, LOperand* right, LOperand* temp) { - inputs_[0] = left; - inputs_[1] = right; + LDivI(LOperand* dividend, LOperand* divisor, LOperand* temp) { + inputs_[0] = dividend; + inputs_[1] = divisor; temps_[0] = temp; } - LOperand* left() { return inputs_[0]; } - LOperand* right() { return inputs_[1]; } + LOperand* dividend() { return inputs_[0]; } + LOperand* divisor() { return inputs_[1]; } LOperand* temp() { return temps_[0]; } DECLARE_CONCRETE_INSTRUCTION(DivI, "div-i") @@ -794,6 +774,23 @@ class LFlooringDivByConstI V8_FINAL : public LTemplateInstruction<1, 1, 3> { }; +class LFlooringDivI V8_FINAL : public LTemplateInstruction<1, 2, 1> { + public: + LFlooringDivI(LOperand* dividend, LOperand* divisor, LOperand* temp) { + inputs_[0] = dividend; + inputs_[1] = divisor; + temps_[0] = temp; + } + + LOperand* dividend() { return inputs_[0]; } + LOperand* divisor() { return inputs_[1]; } + LOperand* temp() { return temps_[0]; } + + DECLARE_CONCRETE_INSTRUCTION(FlooringDivI, "flooring-div-i") + DECLARE_HYDROGEN_ACCESSOR(MathFloorOfDiv) +}; + + class LMulI V8_FINAL : public LTemplateInstruction<1, 2, 0> { public: LMulI(LOperand* left, LOperand* right) { @@ -1228,6 +1225,9 @@ class LBitI V8_FINAL : public LTemplateInstruction<1, 2, 0> { LOperand* right() { return inputs_[1]; } Token::Value op() const { return hydrogen()->op(); } + bool IsInteger32() const { + return hydrogen()->representation().IsInteger32(); + } DECLARE_CONCRETE_INSTRUCTION(BitI, "bit-i") DECLARE_HYDROGEN_ACCESSOR(Bitwise) @@ -1944,7 +1944,7 @@ class LCallRuntime V8_FINAL : public LTemplateInstruction<1, 1, 0> { DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime") DECLARE_HYDROGEN_ACCESSOR(CallRuntime) - virtual bool ClobbersDoubleRegisters() const V8_OVERRIDE { + virtual bool 
ClobbersDoubleRegisters(Isolate* isolate) const V8_OVERRIDE { return save_doubles() == kDontSaveFPRegs; } @@ -2130,7 +2130,6 @@ class LStoreNamedField V8_FINAL : public LTemplateInstruction<0, 2, 1> { virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE; - Handle<Map> transition() const { return hydrogen()->transition_map(); } Representation representation() const { return hydrogen()->field_representation(); } @@ -2339,7 +2338,7 @@ class LCheckInstanceType V8_FINAL : public LTemplateInstruction<0, 1, 0> { class LCheckMaps V8_FINAL : public LTemplateInstruction<0, 1, 0> { public: - explicit LCheckMaps(LOperand* value) { + explicit LCheckMaps(LOperand* value = NULL) { inputs_[0] = value; } @@ -2661,6 +2660,8 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase { next_block_(NULL), allocator_(allocator) { } + Isolate* isolate() const { return graph_->isolate(); } + // Build the sequence for the graph. LPlatformChunk* Build(); @@ -2681,12 +2682,13 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase { LInstruction* DoMathClz32(HUnaryMathOperation* instr); LInstruction* DoDivByPowerOf2I(HDiv* instr); LInstruction* DoDivByConstI(HDiv* instr); - LInstruction* DoDivI(HBinaryOperation* instr); + LInstruction* DoDivI(HDiv* instr); LInstruction* DoModByPowerOf2I(HMod* instr); LInstruction* DoModByConstI(HMod* instr); LInstruction* DoModI(HMod* instr); LInstruction* DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr); LInstruction* DoFlooringDivByConstI(HMathFloorOfDiv* instr); + LInstruction* DoFlooringDivI(HMathFloorOfDiv* instr); private: enum Status { diff --git a/deps/v8/src/x64/macro-assembler-x64.cc b/deps/v8/src/x64/macro-assembler-x64.cc index 6f313f7a6..17db9bf1a 100644 --- a/deps/v8/src/x64/macro-assembler-x64.cc +++ b/deps/v8/src/x64/macro-assembler-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#include "v8.h" @@ -54,10 +31,10 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) } -static const int kInvalidRootRegisterDelta = -1; +static const int64_t kInvalidRootRegisterDelta = -1; -intptr_t MacroAssembler::RootRegisterDelta(ExternalReference other) { +int64_t MacroAssembler::RootRegisterDelta(ExternalReference other) { if (predictable_code_size() && (other.address() < reinterpret_cast<Address>(isolate()) || other.address() >= reinterpret_cast<Address>(isolate() + 1))) { @@ -65,17 +42,27 @@ intptr_t MacroAssembler::RootRegisterDelta(ExternalReference other) { } Address roots_register_value = kRootRegisterBias + reinterpret_cast<Address>(isolate()->heap()->roots_array_start()); - intptr_t delta = other.address() - roots_register_value; + + int64_t delta = kInvalidRootRegisterDelta; // Bogus initialization. + if (kPointerSize == kInt64Size) { + delta = other.address() - roots_register_value; + } else { + // For x32, zero extend the address to 64-bit and calculate the delta. + uint64_t o = static_cast<uint32_t>( + reinterpret_cast<intptr_t>(other.address())); + uint64_t r = static_cast<uint32_t>( + reinterpret_cast<intptr_t>(roots_register_value)); + delta = o - r; + } return delta; } Operand MacroAssembler::ExternalOperand(ExternalReference target, Register scratch) { - if (root_array_available_ && !Serializer::enabled()) { - intptr_t delta = RootRegisterDelta(target); + if (root_array_available_ && !Serializer::enabled(isolate())) { + int64_t delta = RootRegisterDelta(target); if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { - Serializer::TooLateToEnableNow(); return Operand(kRootRegister, static_cast<int32_t>(delta)); } } @@ -85,10 +72,9 @@ Operand MacroAssembler::ExternalOperand(ExternalReference target, void MacroAssembler::Load(Register destination, ExternalReference source) { - if (root_array_available_ && !Serializer::enabled()) { - intptr_t delta = RootRegisterDelta(source); + if (root_array_available_ && !Serializer::enabled(isolate())) { + int64_t delta = RootRegisterDelta(source); if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { - Serializer::TooLateToEnableNow(); movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta))); return; } @@ -104,10 +90,9 @@ void MacroAssembler::Load(Register destination, ExternalReference source) { void MacroAssembler::Store(ExternalReference destination, Register source) { - if (root_array_available_ && !Serializer::enabled()) { - intptr_t delta = RootRegisterDelta(destination); + if (root_array_available_ && !Serializer::enabled(isolate())) { + int64_t delta = RootRegisterDelta(destination); if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { - Serializer::TooLateToEnableNow(); movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source); return; } @@ -124,10 +109,9 @@ void MacroAssembler::Store(ExternalReference destination, Register source) { void MacroAssembler::LoadAddress(Register destination, ExternalReference source) { - if (root_array_available_ && !Serializer::enabled()) { - intptr_t delta = RootRegisterDelta(source); + if (root_array_available_ && !Serializer::enabled(isolate())) { + int64_t delta = RootRegisterDelta(source); if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { - Serializer::TooLateToEnableNow(); leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta))); return; } @@ -138,13 +122,12 @@ void MacroAssembler::LoadAddress(Register destination, int MacroAssembler::LoadAddressSize(ExternalReference source) { - if 
(root_array_available_ && !Serializer::enabled()) { + if (root_array_available_ && !Serializer::enabled(isolate())) { // This calculation depends on the internals of LoadAddress. // It's correctness is ensured by the asserts in the Call // instruction below. - intptr_t delta = RootRegisterDelta(source); + int64_t delta = RootRegisterDelta(source); if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { - Serializer::TooLateToEnableNow(); // Operand is leap(scratch, Operand(kRootRegister, delta)); // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7. int size = 4; @@ -161,7 +144,7 @@ int MacroAssembler::LoadAddressSize(ExternalReference source) { void MacroAssembler::PushAddress(ExternalReference source) { int64_t address = reinterpret_cast<int64_t>(source.address()); - if (is_int32(address) && !Serializer::enabled()) { + if (is_int32(address) && !Serializer::enabled(isolate())) { if (emit_debug_code()) { Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone()); } @@ -253,7 +236,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests. j(equal, &done, Label::kNear); } StoreBufferOverflowStub store_buffer_overflow = - StoreBufferOverflowStub(save_fp); + StoreBufferOverflowStub(isolate(), save_fp); CallStub(&store_buffer_overflow); if (and_then == kReturnAtEnd) { ret(0); @@ -269,7 +252,7 @@ void MacroAssembler::InNewSpace(Register object, Condition cc, Label* branch, Label::Distance distance) { - if (Serializer::enabled()) { + if (Serializer::enabled(isolate())) { // Can't do arithmetic on external references if it might get serialized. // The mask isn't really an address. We load it as an external reference in // case the size of the new space is different between the snapshot maker @@ -431,7 +414,8 @@ void MacroAssembler::RecordWrite(Register object, &done, Label::kNear); - RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode); + RecordWriteStub stub(isolate(), object, value, address, remembered_set_action, + fp_mode); CallStub(&stub); bind(&done); @@ -538,12 +522,12 @@ void MacroAssembler::Abort(BailoutReason reason) { void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) { ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs - Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id); + Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); } void MacroAssembler::TailCallStub(CodeStub* stub) { - Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET); + Jump(stub->GetCode(), RelocInfo::CODE_TARGET); } @@ -558,14 +542,6 @@ bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { } -void MacroAssembler::IllegalOperation(int num_arguments) { - if (num_arguments > 0) { - addp(rsp, Immediate(num_arguments * kPointerSize)); - } - LoadRoot(rax, Heap::kUndefinedValueRootIndex); -} - - void MacroAssembler::IndexFromHash(Register hash, Register index) { // The assert checks that the constants for the maximum number of digits // for an array index cached in the hash field and the number of bits @@ -577,7 +553,7 @@ void MacroAssembler::IndexFromHash(Register hash, Register index) { // key: string key // hash: key's hash field, including its array index value. andp(hash, Immediate(String::kArrayIndexValueMask)); - shr(hash, Immediate(String::kHashShift)); + shrp(hash, Immediate(String::kHashShift)); // Here we actually clobber the key which will be used if calling into // runtime later. However as the new key is the numeric value of a string key // there is no difference in using either key. 
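The RootRegisterDelta/ExternalOperand changes above compute the delta as a full int64_t and only use root-register-relative addressing when it fits a signed 32-bit displacement. A sketch of that reachability test; hypothetical helper, not the MacroAssembler API:

    #include <cassert>
    #include <cstdint>

    bool FitsInDisp32(uint64_t target, uint64_t root_register_value) {
      int64_t delta = static_cast<int64_t>(target - root_register_value);
      return delta >= INT32_MIN && delta <= INT32_MAX;   // usable as Operand(kRootRegister, disp32)
    }

    int main() {
      assert(FitsInDisp32(0x1000, 0x800));
      assert(!FitsInDisp32(0x200000000ULL, 0));   // delta needs more than 32 bits
      return 0;
    }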
@@ -591,10 +567,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // If the expected number of arguments of the runtime function is // constant, we check that the actual number of arguments match the // expectation. - if (f->nargs >= 0 && f->nargs != num_arguments) { - IllegalOperation(num_arguments); - return; - } + CHECK(f->nargs < 0 || f->nargs == num_arguments); // TODO(1236192): Most runtime routines don't need the number of // arguments passed in because it is constant. At some point we @@ -602,7 +575,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // smarter. Set(rax, num_arguments); LoadAddress(rbx, ExternalReference(f, isolate())); - CEntryStub ces(f->result_size, save_doubles); + CEntryStub ces(isolate(), f->result_size, save_doubles); CallStub(&ces); } @@ -612,7 +585,7 @@ void MacroAssembler::CallExternalReference(const ExternalReference& ext, Set(rax, num_arguments); LoadAddress(rbx, ext); - CEntryStub stub(1); + CEntryStub stub(isolate(), 1); CallStub(&stub); } @@ -660,7 +633,7 @@ void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) { void MacroAssembler::CallApiFunctionAndReturn( Register function_address, - Address thunk_address, + ExternalReference thunk_ref, Register thunk_last_arg, int stack_space, Operand return_value_operand, @@ -707,16 +680,13 @@ void MacroAssembler::CallApiFunctionAndReturn( Label profiler_disabled; Label end_profiler_check; - bool* is_profiling_flag = - isolate()->cpu_profiler()->is_profiling_address(); - STATIC_ASSERT(sizeof(*is_profiling_flag) == 1); - Move(rax, is_profiling_flag, RelocInfo::EXTERNAL_REFERENCE); + Move(rax, ExternalReference::is_profiling_address(isolate())); cmpb(Operand(rax, 0), Immediate(0)); j(zero, &profiler_disabled); // Third parameter is the address of the actual getter function. Move(thunk_last_arg, function_address); - Move(rax, thunk_address, RelocInfo::EXTERNAL_REFERENCE); + Move(rax, thunk_ref); jmp(&end_profiler_check); bind(&profiler_disabled); @@ -821,8 +791,8 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, int result_size) { // Set the entry point and jump to the C entry runtime stub. LoadAddress(rbx, ext); - CEntryStub ces(result_size); - jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET); + CEntryStub ces(isolate(), result_size); + jmp(ces.GetCode(), RelocInfo::CODE_TARGET); } @@ -981,7 +951,6 @@ void MacroAssembler::Set(const Operand& dst, intptr_t x) { movp(dst, kScratchRegister); } } else { - ASSERT(kPointerSize == kInt32Size); movp(dst, Immediate(static_cast<int32_t>(x))); } } @@ -998,11 +967,18 @@ bool MacroAssembler::IsUnsafeInt(const int32_t x) { void MacroAssembler::SafeMove(Register dst, Smi* src) { ASSERT(!dst.is(kScratchRegister)); - ASSERT(SmiValuesAre32Bits()); // JIT cookie can be converted to Smi. if (IsUnsafeInt(src->value()) && jit_cookie() != 0) { - Move(dst, Smi::FromInt(src->value() ^ jit_cookie())); - Move(kScratchRegister, Smi::FromInt(jit_cookie())); - xorq(dst, kScratchRegister); + if (SmiValuesAre32Bits()) { + // JIT cookie can be converted to Smi. 
+ Move(dst, Smi::FromInt(src->value() ^ jit_cookie())); + Move(kScratchRegister, Smi::FromInt(jit_cookie())); + xorp(dst, kScratchRegister); + } else { + ASSERT(SmiValuesAre31Bits()); + int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src)); + movp(dst, Immediate(value ^ jit_cookie())); + xorp(dst, Immediate(jit_cookie())); + } } else { Move(dst, src); } @@ -1010,11 +986,18 @@ void MacroAssembler::SafeMove(Register dst, Smi* src) { void MacroAssembler::SafePush(Smi* src) { - ASSERT(SmiValuesAre32Bits()); // JIT cookie can be converted to Smi. if (IsUnsafeInt(src->value()) && jit_cookie() != 0) { - Push(Smi::FromInt(src->value() ^ jit_cookie())); - Move(kScratchRegister, Smi::FromInt(jit_cookie())); - xorq(Operand(rsp, 0), kScratchRegister); + if (SmiValuesAre32Bits()) { + // JIT cookie can be converted to Smi. + Push(Smi::FromInt(src->value() ^ jit_cookie())); + Move(kScratchRegister, Smi::FromInt(jit_cookie())); + xorp(Operand(rsp, 0), kScratchRegister); + } else { + ASSERT(SmiValuesAre31Bits()); + int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src)); + Push(Immediate(value ^ jit_cookie())); + xorp(Operand(rsp, 0), Immediate(jit_cookie())); + } } else { Push(src); } @@ -1096,7 +1079,7 @@ void MacroAssembler::Integer32ToSmi(Register dst, Register src) { if (!dst.is(src)) { movl(dst, src); } - shl(dst, Immediate(kSmiShift)); + shlp(dst, Immediate(kSmiShift)); } @@ -1108,8 +1091,15 @@ void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) { Abort(kInteger32ToSmiFieldWritingToNonSmiLocation); bind(&ok); } - ASSERT(kSmiShift % kBitsPerByte == 0); - movl(Operand(dst, kSmiShift / kBitsPerByte), src); + + if (SmiValuesAre32Bits()) { + ASSERT(kSmiShift % kBitsPerByte == 0); + movl(Operand(dst, kSmiShift / kBitsPerByte), src); + } else { + ASSERT(SmiValuesAre31Bits()); + Integer32ToSmi(kScratchRegister, src); + movp(dst, kScratchRegister); + } } @@ -1121,7 +1111,7 @@ void MacroAssembler::Integer64PlusConstantToSmi(Register dst, } else { leal(dst, Operand(src, constant)); } - shl(dst, Immediate(kSmiShift)); + shlp(dst, Immediate(kSmiShift)); } @@ -1130,12 +1120,24 @@ void MacroAssembler::SmiToInteger32(Register dst, Register src) { if (!dst.is(src)) { movp(dst, src); } - shr(dst, Immediate(kSmiShift)); + + if (SmiValuesAre32Bits()) { + shrp(dst, Immediate(kSmiShift)); + } else { + ASSERT(SmiValuesAre31Bits()); + sarl(dst, Immediate(kSmiShift)); + } } void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) { - movl(dst, Operand(src, kSmiShift / kBitsPerByte)); + if (SmiValuesAre32Bits()) { + movl(dst, Operand(src, kSmiShift / kBitsPerByte)); + } else { + ASSERT(SmiValuesAre31Bits()); + movl(dst, src); + sarl(dst, Immediate(kSmiShift)); + } } @@ -1144,12 +1146,22 @@ void MacroAssembler::SmiToInteger64(Register dst, Register src) { if (!dst.is(src)) { movp(dst, src); } - sar(dst, Immediate(kSmiShift)); + sarp(dst, Immediate(kSmiShift)); + if (kPointerSize == kInt32Size) { + // Sign extend to 64-bit. 
+ movsxlq(dst, dst); + } } void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) { - movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte)); + if (SmiValuesAre32Bits()) { + movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte)); + } else { + ASSERT(SmiValuesAre31Bits()); + movp(dst, src); + SmiToInteger64(dst, dst); + } } @@ -1199,7 +1211,12 @@ void MacroAssembler::SmiCompare(const Operand& dst, Register src) { void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) { AssertSmi(dst); - cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value())); + if (SmiValuesAre32Bits()) { + cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value())); + } else { + ASSERT(SmiValuesAre31Bits()); + cmpl(dst, Immediate(src)); + } } @@ -1212,7 +1229,13 @@ void MacroAssembler::Cmp(const Operand& dst, Smi* src) { void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) { - cmpl(Operand(dst, kSmiShift / kBitsPerByte), src); + if (SmiValuesAre32Bits()) { + cmpl(Operand(dst, kSmiShift / kBitsPerByte), src); + } else { + ASSERT(SmiValuesAre31Bits()); + SmiToInteger32(kScratchRegister, dst); + cmpl(kScratchRegister, src); + } } @@ -1229,9 +1252,9 @@ void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst, movp(dst, src); } if (power < kSmiShift) { - sar(dst, Immediate(kSmiShift - power)); + sarp(dst, Immediate(kSmiShift - power)); } else if (power > kSmiShift) { - shl(dst, Immediate(power - kSmiShift)); + shlp(dst, Immediate(power - kSmiShift)); } } @@ -1241,7 +1264,7 @@ void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst, int power) { ASSERT((0 <= power) && (power < 32)); if (dst.is(src)) { - shr(dst, Immediate(power + kSmiShift)); + shrp(dst, Immediate(power + kSmiShift)); } else { UNIMPLEMENTED(); // Not used. } @@ -1284,7 +1307,7 @@ Condition MacroAssembler::CheckNonNegativeSmi(Register src) { STATIC_ASSERT(kSmiTag == 0); // Test that both bits of the mask 0x8000000000000001 are zero. movp(kScratchRegister, src); - rol(kScratchRegister, Immediate(1)); + rolp(kScratchRegister, Immediate(1)); testb(kScratchRegister, Immediate(3)); return zero; } @@ -1295,8 +1318,15 @@ Condition MacroAssembler::CheckBothSmi(Register first, Register second) { return CheckSmi(first); } STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3); - leal(kScratchRegister, Operand(first, second, times_1, 0)); - testb(kScratchRegister, Immediate(0x03)); + if (SmiValuesAre32Bits()) { + leal(kScratchRegister, Operand(first, second, times_1, 0)); + testb(kScratchRegister, Immediate(0x03)); + } else { + ASSERT(SmiValuesAre31Bits()); + movl(kScratchRegister, first); + orl(kScratchRegister, second); + testb(kScratchRegister, Immediate(kSmiTagMask)); + } return zero; } @@ -1308,7 +1338,7 @@ Condition MacroAssembler::CheckBothNonNegativeSmi(Register first, } movp(kScratchRegister, first); orp(kScratchRegister, second); - rol(kScratchRegister, Immediate(1)); + rolp(kScratchRegister, Immediate(1)); testl(kScratchRegister, Immediate(3)); return zero; } @@ -1342,16 +1372,28 @@ Condition MacroAssembler::CheckIsMinSmi(Register src) { Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) { - // A 32-bit integer value can always be converted to a smi. - return always; + if (SmiValuesAre32Bits()) { + // A 32-bit integer value can always be converted to a smi. 
+ return always; + } else { + ASSERT(SmiValuesAre31Bits()); + cmpl(src, Immediate(0xc0000000)); + return positive; + } } Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) { - // An unsigned 32-bit integer value is valid as long as the high bit - // is not set. - testl(src, src); - return positive; + if (SmiValuesAre32Bits()) { + // An unsigned 32-bit integer value is valid as long as the high bit + // is not set. + testl(src, src); + return positive; + } else { + ASSERT(SmiValuesAre31Bits()); + testl(src, Immediate(0xc0000000)); + return zero; + } } @@ -1494,7 +1536,13 @@ void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) { if (constant->value() != 0) { - addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value())); + if (SmiValuesAre32Bits()) { + addl(Operand(dst, kSmiShift / kBitsPerByte), + Immediate(constant->value())); + } else { + ASSERT(SmiValuesAre31Bits()); + addp(dst, Immediate(constant)); + } } } @@ -1952,8 +2000,14 @@ void MacroAssembler::SmiMod(Register dst, void MacroAssembler::SmiNot(Register dst, Register src) { ASSERT(!dst.is(kScratchRegister)); ASSERT(!src.is(kScratchRegister)); - // Set tag and padding bits before negating, so that they are zero afterwards. - movl(kScratchRegister, Immediate(~0)); + if (SmiValuesAre32Bits()) { + // Set tag and padding bits before negating, so that they are zero + // afterwards. + movl(kScratchRegister, Immediate(~0)); + } else { + ASSERT(SmiValuesAre31Bits()); + movl(kScratchRegister, Immediate(1)); + } if (dst.is(src)) { xorp(dst, kScratchRegister); } else { @@ -2034,8 +2088,8 @@ void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst, ASSERT(is_uint5(shift_value)); if (shift_value > 0) { if (dst.is(src)) { - sar(dst, Immediate(shift_value + kSmiShift)); - shl(dst, Immediate(kSmiShift)); + sarp(dst, Immediate(shift_value + kSmiShift)); + shlp(dst, Immediate(kSmiShift)); } else { UNIMPLEMENTED(); // Not used. } @@ -2050,7 +2104,7 @@ void MacroAssembler::SmiShiftLeftConstant(Register dst, movp(dst, src); } if (shift_value > 0) { - shl(dst, Immediate(shift_value)); + shlp(dst, Immediate(shift_value)); } } @@ -2067,8 +2121,8 @@ void MacroAssembler::SmiShiftLogicalRightConstant( testp(dst, dst); j(negative, on_not_smi_result, near_jump); } - shr(dst, Immediate(shift_value + kSmiShift)); - shl(dst, Immediate(kSmiShift)); + shrq(dst, Immediate(shift_value + kSmiShift)); + shlq(dst, Immediate(kSmiShift)); } } @@ -2084,7 +2138,7 @@ void MacroAssembler::SmiShiftLeft(Register dst, SmiToInteger32(rcx, src2); // Shift amount specified by lower 5 bits, not six as the shl opcode. andq(rcx, Immediate(0x1f)); - shl_cl(dst); + shlq_cl(dst); } @@ -2107,8 +2161,8 @@ void MacroAssembler::SmiShiftLogicalRight(Register dst, } SmiToInteger32(rcx, src2); orl(rcx, Immediate(kSmiShift)); - shr_cl(dst); // Shift is rcx modulo 0x1f + 32. - shl(dst, Immediate(kSmiShift)); + shrq_cl(dst); // Shift is rcx modulo 0x1f + 32. + shlq(dst, Immediate(kSmiShift)); testq(dst, dst); if (src1.is(rcx) || src2.is(rcx)) { Label positive_result; @@ -2144,8 +2198,8 @@ void MacroAssembler::SmiShiftArithmeticRight(Register dst, } SmiToInteger32(rcx, src2); orl(rcx, Immediate(kSmiShift)); - sar_cl(dst); // Shift 32 + original rcx & 0x1f. - shl(dst, Immediate(kSmiShift)); + sarp_cl(dst); // Shift 32 + original rcx & 0x1f. 
+ shlp(dst, Immediate(kSmiShift)); if (src1.is(rcx)) { movp(src1, kScratchRegister); } else if (src2.is(rcx)) { @@ -2194,41 +2248,78 @@ void MacroAssembler::SelectNonSmi(Register dst, SmiIndex MacroAssembler::SmiToIndex(Register dst, Register src, int shift) { - ASSERT(is_uint6(shift)); - // There is a possible optimization if shift is in the range 60-63, but that - // will (and must) never happen. - if (!dst.is(src)) { - movq(dst, src); - } - if (shift < kSmiShift) { - sar(dst, Immediate(kSmiShift - shift)); + if (SmiValuesAre32Bits()) { + ASSERT(is_uint6(shift)); + // There is a possible optimization if shift is in the range 60-63, but that + // will (and must) never happen. + if (!dst.is(src)) { + movp(dst, src); + } + if (shift < kSmiShift) { + sarp(dst, Immediate(kSmiShift - shift)); + } else { + shlp(dst, Immediate(shift - kSmiShift)); + } + return SmiIndex(dst, times_1); } else { - shl(dst, Immediate(shift - kSmiShift)); + ASSERT(SmiValuesAre31Bits()); + ASSERT(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1)); + if (!dst.is(src)) { + movp(dst, src); + } + // We have to sign extend the index register to 64-bit as the SMI might + // be negative. + movsxlq(dst, dst); + if (shift == times_1) { + sarq(dst, Immediate(kSmiShift)); + return SmiIndex(dst, times_1); + } + return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1)); } - return SmiIndex(dst, times_1); } + SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst, Register src, int shift) { - // Register src holds a positive smi. - ASSERT(is_uint6(shift)); - if (!dst.is(src)) { - movq(dst, src); - } - negq(dst); - if (shift < kSmiShift) { - sar(dst, Immediate(kSmiShift - shift)); + if (SmiValuesAre32Bits()) { + // Register src holds a positive smi. + ASSERT(is_uint6(shift)); + if (!dst.is(src)) { + movp(dst, src); + } + negp(dst); + if (shift < kSmiShift) { + sarp(dst, Immediate(kSmiShift - shift)); + } else { + shlp(dst, Immediate(shift - kSmiShift)); + } + return SmiIndex(dst, times_1); } else { - shl(dst, Immediate(shift - kSmiShift)); + ASSERT(SmiValuesAre31Bits()); + ASSERT(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1)); + if (!dst.is(src)) { + movp(dst, src); + } + negq(dst); + if (shift == times_1) { + sarq(dst, Immediate(kSmiShift)); + return SmiIndex(dst, times_1); + } + return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1)); } - return SmiIndex(dst, times_1); } void MacroAssembler::AddSmiField(Register dst, const Operand& src) { - ASSERT_EQ(0, kSmiShift % kBitsPerByte); - addl(dst, Operand(src, kSmiShift / kBitsPerByte)); + if (SmiValuesAre32Bits()) { + ASSERT_EQ(0, kSmiShift % kBitsPerByte); + addl(dst, Operand(src, kSmiShift / kBitsPerByte)); + } else { + ASSERT(SmiValuesAre31Bits()); + SmiToInteger32(kScratchRegister, src); + addl(dst, kScratchRegister); + } } @@ -2243,32 +2334,39 @@ void MacroAssembler::Push(Smi* source) { } -void MacroAssembler::PushInt64AsTwoSmis(Register src, Register scratch) { +void MacroAssembler::PushRegisterAsTwoSmis(Register src, Register scratch) { + ASSERT(!src.is(scratch)); movp(scratch, src); // High bits. - shr(src, Immediate(64 - kSmiShift)); - shl(src, Immediate(kSmiShift)); + shrp(src, Immediate(kPointerSize * kBitsPerByte - kSmiShift)); + shlp(src, Immediate(kSmiShift)); Push(src); // Low bits. 
- shl(scratch, Immediate(kSmiShift)); + shlp(scratch, Immediate(kSmiShift)); Push(scratch); } -void MacroAssembler::PopInt64AsTwoSmis(Register dst, Register scratch) { +void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) { + ASSERT(!dst.is(scratch)); Pop(scratch); // Low bits. - shr(scratch, Immediate(kSmiShift)); + shrp(scratch, Immediate(kSmiShift)); Pop(dst); - shr(dst, Immediate(kSmiShift)); + shrp(dst, Immediate(kSmiShift)); // High bits. - shl(dst, Immediate(64 - kSmiShift)); + shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift)); orp(dst, scratch); } void MacroAssembler::Test(const Operand& src, Smi* source) { - testl(Operand(src, kIntSize), Immediate(source->value())); + if (SmiValuesAre32Bits()) { + testl(Operand(src, kIntSize), Immediate(source->value())); + } else { + ASSERT(SmiValuesAre31Bits()); + testl(src, Immediate(source)); + } } @@ -2315,7 +2413,7 @@ void MacroAssembler::LookupNumberStringCache(Register object, // but times_twice_pointer_size (multiplication by 16) scale factor // is not supported by addrmode on x64 platform. // So we have to premultiply entry index before lookup. - shl(scratch, Immediate(kPointerSizeLog2 + 1)); + shlp(scratch, Immediate(kPointerSizeLog2 + 1)); Register index = scratch; Register probe = mask; @@ -2338,7 +2436,7 @@ void MacroAssembler::LookupNumberStringCache(Register object, // but times_twice_pointer_size (multiplication by 16) scale factor // is not supported by addrmode on x64 platform. // So we have to premultiply entry index before lookup. - shl(scratch, Immediate(kPointerSizeLog2 + 1)); + shlp(scratch, Immediate(kPointerSizeLog2 + 1)); // Check if the entry is the smi we are looking for. cmpp(object, @@ -2576,11 +2674,24 @@ void MacroAssembler::Drop(int stack_elements) { } +void MacroAssembler::DropUnderReturnAddress(int stack_elements, + Register scratch) { + ASSERT(stack_elements > 0); + if (kPointerSize == kInt64Size && stack_elements == 1) { + popq(MemOperand(rsp, 0)); + return; + } + + PopReturnAddressTo(scratch); + Drop(stack_elements); + PushReturnAddressFrom(scratch); +} + + void MacroAssembler::Push(Register src) { if (kPointerSize == kInt64Size) { pushq(src); } else { - ASSERT(kPointerSize == kInt32Size); // x32 uses 64-bit push for rbp in the prologue. ASSERT(src.code() != rbp.code()); leal(rsp, Operand(rsp, -4)); @@ -2593,7 +2704,6 @@ void MacroAssembler::Push(const Operand& src) { if (kPointerSize == kInt64Size) { pushq(src); } else { - ASSERT(kPointerSize == kInt32Size); movp(kScratchRegister, src); leal(rsp, Operand(rsp, -4)); movp(Operand(rsp, 0), kScratchRegister); @@ -2601,11 +2711,20 @@ void MacroAssembler::Push(const Operand& src) { } +void MacroAssembler::PushQuad(const Operand& src) { + if (kPointerSize == kInt64Size) { + pushq(src); + } else { + movp(kScratchRegister, src); + pushq(kScratchRegister); + } +} + + void MacroAssembler::Push(Immediate value) { if (kPointerSize == kInt64Size) { pushq(value); } else { - ASSERT(kPointerSize == kInt32Size); leal(rsp, Operand(rsp, -4)); movp(Operand(rsp, 0), value); } @@ -2616,7 +2735,6 @@ void MacroAssembler::PushImm32(int32_t imm32) { if (kPointerSize == kInt64Size) { pushq_imm32(imm32); } else { - ASSERT(kPointerSize == kInt32Size); leal(rsp, Operand(rsp, -4)); movp(Operand(rsp, 0), Immediate(imm32)); } @@ -2627,7 +2745,6 @@ void MacroAssembler::Pop(Register dst) { if (kPointerSize == kInt64Size) { popq(dst); } else { - ASSERT(kPointerSize == kInt32Size); // x32 uses 64-bit pop for rbp in the epilogue. 
ASSERT(dst.code() != rbp.code()); movp(dst, Operand(rsp, 0)); @@ -2640,7 +2757,6 @@ void MacroAssembler::Pop(const Operand& dst) { if (kPointerSize == kInt64Size) { popq(dst); } else { - ASSERT(kPointerSize == kInt32Size); Register scratch = dst.AddressUsesRegister(kScratchRegister) ? kSmiConstantRegister : kScratchRegister; movp(scratch, Operand(rsp, 0)); @@ -2656,10 +2772,44 @@ void MacroAssembler::Pop(const Operand& dst) { } -void MacroAssembler::TestBit(const Operand& src, int bits) { +void MacroAssembler::PopQuad(const Operand& dst) { + if (kPointerSize == kInt64Size) { + popq(dst); + } else { + popq(kScratchRegister); + movp(dst, kScratchRegister); + } +} + + +void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst, + Register base, + int offset) { + ASSERT(offset > SharedFunctionInfo::kLengthOffset && + offset <= SharedFunctionInfo::kSize && + (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1)); + if (kPointerSize == kInt64Size) { + movsxlq(dst, FieldOperand(base, offset)); + } else { + movp(dst, FieldOperand(base, offset)); + SmiToInteger32(dst, dst); + } +} + + +void MacroAssembler::TestBitSharedFunctionInfoSpecialField(Register base, + int offset, + int bits) { + ASSERT(offset > SharedFunctionInfo::kLengthOffset && + offset <= SharedFunctionInfo::kSize && + (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1)); + if (kPointerSize == kInt32Size) { + // On x32, this field is represented by SMI. + bits += kSmiShift; + } int byte_offset = bits / kBitsPerByte; int bit_in_byte = bits & (kBitsPerByte - 1); - testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); + testb(FieldOperand(base, offset + byte_offset), Immediate(1 << bit_in_byte)); } @@ -2673,7 +2823,6 @@ void MacroAssembler::Jump(const Operand& op) { if (kPointerSize == kInt64Size) { jmp(op); } else { - ASSERT(kPointerSize == kInt32Size); movp(kScratchRegister, op); jmp(kScratchRegister); } @@ -2715,7 +2864,6 @@ void MacroAssembler::Call(const Operand& op) { if (kPointerSize == kInt64Size) { call(op); } else { - ASSERT(kPointerSize == kInt32Size); movp(kScratchRegister, op); call(kScratchRegister); } @@ -2893,7 +3041,7 @@ void MacroAssembler::JumpToHandlerEntry() { // a fixed array of (smi-tagged) code offsets. // rax = exception, rdi = code object, rdx = state. movp(rbx, FieldOperand(rdi, Code::kHandlerTableOffset)); - shr(rdx, Immediate(StackHandler::kKindWidth)); + shrp(rdx, Immediate(StackHandler::kKindWidth)); movp(rdx, FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); SmiToInteger64(rdx, rdx); @@ -3188,8 +3336,8 @@ void MacroAssembler::LoadUint32(XMMRegister dst, void MacroAssembler::SlowTruncateToI(Register result_reg, Register input_reg, int offset) { - DoubleToIStub stub(input_reg, result_reg, offset, true); - call(stub.GetCode(isolate()), RelocInfo::CODE_TARGET); + DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true); + call(stub.GetCode(), RelocInfo::CODE_TARGET); } @@ -3212,6 +3360,8 @@ void MacroAssembler::TruncateHeapNumberToI(Register result_reg, } bind(&done); + // Keep our invariant that the upper 32 bits are zero. + movl(result_reg, result_reg); } @@ -3228,6 +3378,8 @@ void MacroAssembler::TruncateDoubleToI(Register result_reg, addp(rsp, Immediate(kDoubleSize)); bind(&done); + // Keep our invariant that the upper 32 bits are zero. 
+ movl(result_reg, result_reg); } @@ -3498,9 +3650,9 @@ void MacroAssembler::TryGetFunctionPrototype(Register function, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte // field). - TestBit(FieldOperand(kScratchRegister, - SharedFunctionInfo::kCompilerHintsOffset), - SharedFunctionInfo::kBoundFunction); + TestBitSharedFunctionInfoSpecialField(kScratchRegister, + SharedFunctionInfo::kCompilerHintsOffset, + SharedFunctionInfo::kBoundFunction); j(not_zero, miss); } @@ -3573,15 +3725,13 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) { } -#ifdef ENABLE_DEBUGGER_SUPPORT void MacroAssembler::DebugBreak() { Set(rax, 0); // No arguments. LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate())); - CEntryStub ces(1); + CEntryStub ces(isolate(), 1); ASSERT(AllowThisStubCall(&ces)); - Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK); + Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); } -#endif // ENABLE_DEBUGGER_SUPPORT void MacroAssembler::InvokeCode(Register code, @@ -3627,8 +3777,8 @@ void MacroAssembler::InvokeFunction(Register function, ASSERT(function.is(rdi)); movp(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); movp(rsi, FieldOperand(function, JSFunction::kContextOffset)); - movsxlq(rbx, - FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset)); + LoadSharedFunctionInfoSpecialField(rbx, rdx, + SharedFunctionInfo::kFormalParameterCountOffset); // Advances rdx to the end of the Code object header, to the start of // the executable code. movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); @@ -4882,7 +5032,7 @@ void MacroAssembler::GetMarkBits(Register addr_reg, shrl(rcx, Immediate(kPointerSizeLog2)); andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1)); movl(mask_reg, Immediate(1)); - shl_cl(mask_reg); + shlp_cl(mask_reg); } @@ -4966,7 +5116,7 @@ void MacroAssembler::EnsureNotWhite( addp(length, Immediate(0x04)); // Value now either 4 (if ASCII) or 8 (if UC16), i.e. char-size shifted by 2. imulp(length, FieldOperand(value, String::kLengthOffset)); - shr(length, Immediate(2 + kSmiTagSize + kSmiShiftSize)); + shrp(length, Immediate(2 + kSmiTagSize + kSmiShiftSize)); addp(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask)); andp(length, Immediate(~kObjectAlignmentMask)); @@ -5065,7 +5215,7 @@ void MacroAssembler::JumpIfDictionaryInPrototypeChain( movp(current, FieldOperand(current, HeapObject::kMapOffset)); movp(scratch1, FieldOperand(current, Map::kBitField2Offset)); andp(scratch1, Immediate(Map::kElementsKindMask)); - shr(scratch1, Immediate(Map::kElementsKindShift)); + shrp(scratch1, Immediate(Map::kElementsKindShift)); cmpp(scratch1, Immediate(DICTIONARY_ELEMENTS)); j(equal, found); movp(current, FieldOperand(current, Map::kPrototypeOffset)); diff --git a/deps/v8/src/x64/macro-assembler-x64.h b/deps/v8/src/x64/macro-assembler-x64.h index af65a6546..d9893d621 100644 --- a/deps/v8/src/x64/macro-assembler-x64.h +++ b/deps/v8/src/x64/macro-assembler-x64.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
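Most of the churn in macro-assembler-x64.cc above comes from dispatching on SmiValuesAre32Bits() versus SmiValuesAre31Bits(): the classic x64 encoding keeps a 32-bit payload in the upper half of a 64-bit word, while the x32 encoding keeps a 31-bit payload above a single tag bit, so untagging needs an arithmetic shift (the new sarl/sarp uses) and shortcuts like addressing only the upper four bytes of a smi field no longer apply. A rough standalone illustration of the two encodings, assuming plain integers and ignoring heap-object tag bits:

    #include <cassert>
    #include <cstdint>

    // 32-bit-payload smis: the value lives in bits 32..63, low bits are zero.
    int64_t TagSmi32(int32_t value) {
      return static_cast<int64_t>(
          static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32);
    }
    int32_t UntagSmi32(int64_t smi) {
      return static_cast<int32_t>(smi >> 32);  // arithmetic shift keeps the sign
    }

    // 31-bit-payload smis: the value lives in bits 1..31, bit 0 is the tag (0).
    int32_t TagSmi31(int32_t value) {
      assert(value >= -(1 << 30) && value < (1 << 30));  // must fit in 31 bits
      return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
    }
    int32_t UntagSmi31(int32_t smi) {
      return smi >> 1;  // arithmetic shift, as in the sarl() the diff adds
    }

    int main() {
      assert(UntagSmi32(TagSmi32(-7)) == -7);
      assert(UntagSmi31(TagSmi31(-7)) == -7);
      return 0;
    }

The new SmiValuesAre31Bits() branches in SmiToInteger32, SmiCompare, CheckBothSmi, AddSmiField and friends all reduce to this difference: a one-bit tag that has to be sign-extended away versus a 32-bit shift whose upper half can be read or written directly in memory.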
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_X64_MACRO_ASSEMBLER_X64_H_ #define V8_X64_MACRO_ASSEMBLER_X64_H_ @@ -291,12 +268,10 @@ class MacroAssembler: public Assembler { RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET, SmiCheck smi_check = INLINE_SMI_CHECK); -#ifdef ENABLE_DEBUGGER_SUPPORT // --------------------------------------------------------------------------- // Debugger Support void DebugBreak(); -#endif // Generates function and stub prologue code. void Prologue(PrologueFrameMode frame_mode); @@ -657,10 +632,10 @@ class MacroAssembler: public Assembler { Register src, int shift_value); void SmiShiftLogicalRightConstant(Register dst, - Register src, - int shift_value, - Label* on_not_smi_result, - Label::Distance near_jump = Label::kFar); + Register src, + int shift_value, + Label* on_not_smi_result, + Label::Distance near_jump = Label::kFar); void SmiShiftArithmeticRightConstant(Register dst, Register src, int shift_value); @@ -726,12 +701,12 @@ class MacroAssembler: public Assembler { void Push(Smi* smi); - // Save away a 64-bit integer on the stack as two 32-bit integers + // Save away a raw integer with pointer size on the stack as two integers // masquerading as smis so that the garbage collector skips visiting them. - void PushInt64AsTwoSmis(Register src, Register scratch = kScratchRegister); - // Reconstruct a 64-bit integer from two 32-bit integers masquerading as - // smis on the top of stack. - void PopInt64AsTwoSmis(Register dst, Register scratch = kScratchRegister); + void PushRegisterAsTwoSmis(Register src, Register scratch = kScratchRegister); + // Reconstruct a raw integer with pointer size from two integers masquerading + // as smis on the top of stack. + void PopRegisterAsTwoSmis(Register dst, Register scratch = kScratchRegister); void Test(const Operand& dst, Smi* source); @@ -813,8 +788,13 @@ class MacroAssembler: public Assembler { // Move if the registers are not identical. void Move(Register target, Register source); - // Bit-field support. - void TestBit(const Operand& dst, int bit_index); + // TestBit and Load SharedFunctionInfo special field. 
+ void TestBitSharedFunctionInfoSpecialField(Register base, + int offset, + int bit_index); + void LoadSharedFunctionInfoSpecialField(Register dst, + Register base, + int offset); // Handle support void Move(Register dst, Handle<Object> source); @@ -835,14 +815,21 @@ class MacroAssembler: public Assembler { // Emit code to discard a non-negative number of pointer-sized elements // from the stack, clobbering only the rsp register. void Drop(int stack_elements); + // Emit code to discard a positive number of pointer-sized elements + // from the stack under the return address which remains on the top, + // clobbering the rsp register. + void DropUnderReturnAddress(int stack_elements, + Register scratch = kScratchRegister); void Call(Label* target) { call(target); } void Push(Register src); void Push(const Operand& src); + void PushQuad(const Operand& src); void Push(Immediate value); void PushImm32(int32_t imm32); void Pop(Register dst); void Pop(const Operand& dst); + void PopQuad(const Operand& dst); void PushReturnAddressFrom(Register src) { pushq(src); } void PopReturnAddressTo(Register dst) { popq(dst); } void Move(Register dst, ExternalReference ext) { @@ -1026,9 +1013,9 @@ class MacroAssembler: public Assembler { void DecodeField(Register reg) { static const int shift = Field::kShift + kSmiShift; static const int mask = Field::kMask >> Field::kShift; - shr(reg, Immediate(shift)); + shrp(reg, Immediate(shift)); andp(reg, Immediate(mask)); - shl(reg, Immediate(kSmiShift)); + shlp(reg, Immediate(kSmiShift)); } // Abort execution if argument is not a number, enabled via --debug-code. @@ -1218,10 +1205,6 @@ class MacroAssembler: public Assembler { Label* miss, bool miss_on_bound_function = false); - // Generates code for reporting that an illegal operation has - // occurred. - void IllegalOperation(int num_arguments); - // Picks out an array index from the hash field. // Register use: // hash - holds the index's hash. Clobbered. @@ -1310,7 +1293,7 @@ class MacroAssembler: public Assembler { // caller-save registers. Restores context. On return removes // stack_space * kPointerSize (GCed). void CallApiFunctionAndReturn(Register function_address, - Address thunk_address, + ExternalReference thunk_ref, Register thunk_last_arg, int stack_space, Operand return_value_operand, @@ -1460,7 +1443,7 @@ class MacroAssembler: public Assembler { // modified. It may be the "smi 1 constant" register. Register GetSmiConstant(Smi* value); - intptr_t RootRegisterDelta(ExternalReference other); + int64_t RootRegisterDelta(ExternalReference other); // Moves the smi value to the destination register. void LoadSmiConstant(Register dst, Smi* value); @@ -1501,13 +1484,6 @@ class MacroAssembler: public Assembler { Register scratch, AllocationFlags flags); - // Helper for PopHandleScope. Allowed to perform a GC and returns - // NULL if gc_allowed. Does not perform a GC if !gc_allowed, and - // possibly returns a failure object indicating an allocation failure. - Object* PopHandleScopeHelper(Register saved, - Register scratch, - bool gc_allowed); - // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace. void InNewSpace(Register object, Register scratch, diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.cc b/deps/v8/src/x64/regexp-macro-assembler-x64.cc index c819c71cb..6a9a264f9 100644 --- a/deps/v8/src/x64/regexp-macro-assembler-x64.cc +++ b/deps/v8/src/x64/regexp-macro-assembler-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
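The PushRegisterAsTwoSmis/PopRegisterAsTwoSmis pair declared above (renamed from PushInt64AsTwoSmis/PopInt64AsTwoSmis) hides a raw, untagged word from the garbage collector by splitting it across two stack slots that each look like a smi. A small sketch of the bit manipulation the shrp/shlp sequences in the .cc hunk perform, assuming the 32-bit-payload smi layout in which the value occupies the upper half of a 64-bit word:

    #include <cassert>
    #include <cstdint>

    // Split a raw 64-bit word into two values whose payloads sit in the upper
    // 32 bits (so a GC walking the stack sees two smis and skips them), and
    // recombine them afterwards.
    void SplitAsTwoSmis(uint64_t raw, uint64_t* high_smi, uint64_t* low_smi) {
      *high_smi = (raw >> 32) << 32;  // high half, re-tagged as a smi
      *low_smi = raw << 32;           // low half, tagged as a smi
    }

    uint64_t JoinFromTwoSmis(uint64_t high_smi, uint64_t low_smi) {
      uint64_t high = (high_smi >> 32) << 32;  // untag and restore position
      uint64_t low = low_smi >> 32;            // untag
      return high | low;
    }

    int main() {
      uint64_t hi, lo;
      SplitAsTwoSmis(0x123456789abcdef0ULL, &hi, &lo);
      assert(JoinFromTwoSmis(hi, lo) == 0x123456789abcdef0ULL);
      return 0;
    }

The rename also reflects that the saved quantity is pointer-sized rather than always 64 bits, which is why the shift amounts in the implementation are now written as kPointerSize * kBitsPerByte - kSmiShift instead of a hard-coded 64.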
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -241,8 +218,8 @@ void RegExpMacroAssemblerX64::CheckNotBackReferenceIgnoreCase( int start_reg, Label* on_no_match) { Label fallthrough; - __ movq(rdx, register_location(start_reg)); // Offset of start of capture - __ movq(rbx, register_location(start_reg + 1)); // Offset of end of capture + ReadPositionFromRegister(rdx, start_reg); // Offset of start of capture + ReadPositionFromRegister(rbx, start_reg + 1); // Offset of end of capture __ subp(rbx, rdx); // Length of capture. // ----------------------- @@ -390,8 +367,8 @@ void RegExpMacroAssemblerX64::CheckNotBackReference( Label fallthrough; // Find length of back-referenced capture. - __ movq(rdx, register_location(start_reg)); - __ movq(rax, register_location(start_reg + 1)); + ReadPositionFromRegister(rdx, start_reg); // Offset of start of capture + ReadPositionFromRegister(rax, start_reg + 1); // Offset of end of capture __ subp(rax, rdx); // Length to check. // Fail on partial or illegal capture (start of capture after end of capture). @@ -692,12 +669,12 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) { #else // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9 (and then on stack). // Push register parameters on stack for reference. 
- ASSERT_EQ(kInputString, -1 * kPointerSize); - ASSERT_EQ(kStartIndex, -2 * kPointerSize); - ASSERT_EQ(kInputStart, -3 * kPointerSize); - ASSERT_EQ(kInputEnd, -4 * kPointerSize); - ASSERT_EQ(kRegisterOutput, -5 * kPointerSize); - ASSERT_EQ(kNumOutputRegisters, -6 * kPointerSize); + ASSERT_EQ(kInputString, -1 * kRegisterSize); + ASSERT_EQ(kStartIndex, -2 * kRegisterSize); + ASSERT_EQ(kInputStart, -3 * kRegisterSize); + ASSERT_EQ(kInputEnd, -4 * kRegisterSize); + ASSERT_EQ(kRegisterOutput, -5 * kRegisterSize); + ASSERT_EQ(kNumOutputRegisters, -6 * kRegisterSize); __ pushq(rdi); __ pushq(rsi); __ pushq(rdx); @@ -747,7 +724,7 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) { // Load input position. __ movp(rdi, Operand(rbp, kInputStart)); // Set up rdi to be negative offset from string end. - __ subp(rdi, rsi); + __ subq(rdi, rsi); // Set rax to address of char before start of the string // (effectively string position -1). __ movp(rbx, Operand(rbp, kStartIndex)); @@ -831,14 +808,14 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) { __ addp(rcx, rdx); } for (int i = 0; i < num_saved_registers_; i++) { - __ movq(rax, register_location(i)); + __ movp(rax, register_location(i)); if (i == 0 && global_with_zero_length_check()) { // Keep capture start in rdx for the zero-length check later. __ movp(rdx, rax); } __ addp(rax, rcx); // Convert to index from start, not end. if (mode_ == UC16) { - __ sar(rax, Immediate(1)); // Convert byte index to character index. + __ sarp(rax, Immediate(1)); // Convert byte index to character index. } __ movl(Operand(rbx, i * kIntSize), rax); } @@ -1084,13 +1061,31 @@ void RegExpMacroAssemblerX64::PushRegister(int register_index, } +STATIC_ASSERT(kPointerSize == kInt64Size || kPointerSize == kInt32Size); + + void RegExpMacroAssemblerX64::ReadCurrentPositionFromRegister(int reg) { - __ movq(rdi, register_location(reg)); + if (kPointerSize == kInt64Size) { + __ movq(rdi, register_location(reg)); + } else { + // Need sign extension for x32 as rdi might be used as an index register. + __ movsxlq(rdi, register_location(reg)); + } +} + + +void RegExpMacroAssemblerX64::ReadPositionFromRegister(Register dst, int reg) { + if (kPointerSize == kInt64Size) { + __ movq(dst, register_location(reg)); + } else { + // Need sign extension for x32 as dst might be used as an index register. + __ movsxlq(dst, register_location(reg)); + } } void RegExpMacroAssemblerX64::ReadStackPointerFromRegister(int reg) { - __ movq(backtrack_stackpointer(), register_location(reg)); + __ movp(backtrack_stackpointer(), register_location(reg)); __ addp(backtrack_stackpointer(), Operand(rbp, kStackHighEnd)); } @@ -1215,7 +1210,7 @@ int RegExpMacroAssemblerX64::CheckStackGuardState(Address* return_address, ASSERT(*return_address <= re_code->instruction_start() + re_code->instruction_size()); - MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate); + Object* result = Execution::HandleStackGuardInterrupt(isolate); if (*code_handle != re_code) { // Return address no longer valid intptr_t delta = code_handle->address() - re_code->address(); diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.h b/deps/v8/src/x64/regexp-macro-assembler-x64.h index b230ea47f..e9f6a35dd 100644 --- a/deps/v8/src/x64/regexp-macro-assembler-x64.h +++ b/deps/v8/src/x64/regexp-macro-assembler-x64.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. 
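The ReadPositionFromRegister/ReadCurrentPositionFromRegister helpers introduced above use movq on 64-bit-pointer builds but movsxlq on x32, because capture positions are stored as negative offsets from the end of the input and may end up as the index register of an addressing mode. A tiny illustration, with ordinary integers, of why the load has to sign-extend rather than zero-extend (the value -12 is picked only for the example):

    #include <cassert>
    #include <cstdint>

    int main() {
      // A capture position as stored in a 32-bit slot: "12 characters before
      // the end of the input".
      int32_t stored = -12;
      // Sign-extended (what movsxlq does): still -12 when widened to 64 bits.
      int64_t sign_extended = static_cast<int64_t>(stored);
      // Zero-extended (a plain 32-bit load): a huge positive offset that
      // would address far past the end of the string.
      int64_t zero_extended = static_cast<int64_t>(static_cast<uint32_t>(stored));
      assert(sign_extended == -12);
      assert(zero_extended == 0xFFFFFFF4LL);
      return 0;
    }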
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_X64_REGEXP_MACRO_ASSEMBLER_X64_H_ #define V8_X64_REGEXP_MACRO_ASSEMBLER_X64_H_ @@ -135,8 +112,8 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler { // Offsets from rbp of function parameters and stored registers. static const int kFramePointer = 0; // Above the frame pointer - function parameters and return address. - static const int kReturn_eip = kFramePointer + kPointerSize; - static const int kFrameAlign = kReturn_eip + kPointerSize; + static const int kReturn_eip = kFramePointer + kRegisterSize; + static const int kFrameAlign = kReturn_eip + kRegisterSize; #ifdef _WIN64 // Parameters (first four passed as registers, but with room on stack). @@ -145,49 +122,50 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler { // use this space to store the register passed parameters. static const int kInputString = kFrameAlign; // StartIndex is passed as 32 bit int. - static const int kStartIndex = kInputString + kPointerSize; - static const int kInputStart = kStartIndex + kPointerSize; - static const int kInputEnd = kInputStart + kPointerSize; - static const int kRegisterOutput = kInputEnd + kPointerSize; + static const int kStartIndex = kInputString + kRegisterSize; + static const int kInputStart = kStartIndex + kRegisterSize; + static const int kInputEnd = kInputStart + kRegisterSize; + static const int kRegisterOutput = kInputEnd + kRegisterSize; // For the case of global regular expression, we have room to store at least // one set of capture results. For the case of non-global regexp, we ignore // this value. NumOutputRegisters is passed as 32-bit value. The upper // 32 bit of this 64-bit stack slot may contain garbage. 
- static const int kNumOutputRegisters = kRegisterOutput + kPointerSize; - static const int kStackHighEnd = kNumOutputRegisters + kPointerSize; + static const int kNumOutputRegisters = kRegisterOutput + kRegisterSize; + static const int kStackHighEnd = kNumOutputRegisters + kRegisterSize; // DirectCall is passed as 32 bit int (values 0 or 1). - static const int kDirectCall = kStackHighEnd + kPointerSize; - static const int kIsolate = kDirectCall + kPointerSize; + static const int kDirectCall = kStackHighEnd + kRegisterSize; + static const int kIsolate = kDirectCall + kRegisterSize; #else // In AMD64 ABI Calling Convention, the first six integer parameters // are passed as registers, and caller must allocate space on the stack // if it wants them stored. We push the parameters after the frame pointer. - static const int kInputString = kFramePointer - kPointerSize; - static const int kStartIndex = kInputString - kPointerSize; - static const int kInputStart = kStartIndex - kPointerSize; - static const int kInputEnd = kInputStart - kPointerSize; - static const int kRegisterOutput = kInputEnd - kPointerSize; + static const int kInputString = kFramePointer - kRegisterSize; + static const int kStartIndex = kInputString - kRegisterSize; + static const int kInputStart = kStartIndex - kRegisterSize; + static const int kInputEnd = kInputStart - kRegisterSize; + static const int kRegisterOutput = kInputEnd - kRegisterSize; + // For the case of global regular expression, we have room to store at least // one set of capture results. For the case of non-global regexp, we ignore // this value. - static const int kNumOutputRegisters = kRegisterOutput - kPointerSize; + static const int kNumOutputRegisters = kRegisterOutput - kRegisterSize; static const int kStackHighEnd = kFrameAlign; - static const int kDirectCall = kStackHighEnd + kPointerSize; - static const int kIsolate = kDirectCall + kPointerSize; + static const int kDirectCall = kStackHighEnd + kRegisterSize; + static const int kIsolate = kDirectCall + kRegisterSize; #endif #ifdef _WIN64 // Microsoft calling convention has three callee-saved registers // (that we are using). We push these after the frame pointer. - static const int kBackup_rsi = kFramePointer - kPointerSize; - static const int kBackup_rdi = kBackup_rsi - kPointerSize; - static const int kBackup_rbx = kBackup_rdi - kPointerSize; + static const int kBackup_rsi = kFramePointer - kRegisterSize; + static const int kBackup_rdi = kBackup_rsi - kRegisterSize; + static const int kBackup_rbx = kBackup_rdi - kRegisterSize; static const int kLastCalleeSaveRegister = kBackup_rbx; #else // AMD64 Calling Convention has only one callee-save register that // we use. We push this after the frame pointer (and after the // parameters). - static const int kBackup_rbx = kNumOutputRegisters - kPointerSize; + static const int kBackup_rbx = kNumOutputRegisters - kRegisterSize; static const int kLastCalleeSaveRegister = kBackup_rbx; #endif @@ -268,6 +246,8 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler { // Increments the stack pointer (rcx) by a word size. inline void Drop(); + inline void ReadPositionFromRegister(Register dst, int reg); + Isolate* isolate() const { return masm_.isolate(); } MacroAssembler masm_; diff --git a/deps/v8/src/x64/simulator-x64.cc b/deps/v8/src/x64/simulator-x64.cc index 448b025a6..f7f2fb4bb 100644 --- a/deps/v8/src/x64/simulator-x64.cc +++ b/deps/v8/src/x64/simulator-x64.cc @@ -1,26 +1,3 @@ // Copyright 2009 the V8 project authors. All rights reserved. 
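The regexp frame-layout constants above switch from kPointerSize to kRegisterSize because each of these slots is created with pushq and therefore holds a full 64-bit register; on x32, kPointerSize shrinks to 4 while the slots stay 8 bytes wide, so pointer-sized offsets would no longer line up with the frame. A reduced excerpt of the same arithmetic as plain C++, limited to a few constants from the System V branch of the hunk:

    #include <cstdio>

    // A pushq'd stack slot is always 8 bytes, even when pointers are 4 bytes.
    const int kRegisterSize = 8;
    const int kFramePointer = 0;
    // Above the frame pointer: the return address, then stack-passed arguments.
    const int kReturn_eip = kFramePointer + kRegisterSize;
    const int kFrameAlign = kReturn_eip + kRegisterSize;
    // Below the frame pointer: register parameters pushed on entry.
    const int kInputString = kFramePointer - kRegisterSize;
    const int kStartIndex = kInputString - kRegisterSize;
    const int kInputStart = kStartIndex - kRegisterSize;

    int main() {
      printf("kFrameAlign=%d kInputString=%d kStartIndex=%d kInputStart=%d\n",
             kFrameAlign, kInputString, kStartIndex, kInputStart);
      return 0;
    }

kFrameAlign comes out at 16, the offset of the first stack-passed argument, while the saved register parameters sit at negative offsets below rbp, matching the values in the header.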
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. diff --git a/deps/v8/src/x64/simulator-x64.h b/deps/v8/src/x64/simulator-x64.h index 8aba70181..a43728f01 100644 --- a/deps/v8/src/x64/simulator-x64.h +++ b/deps/v8/src/x64/simulator-x64.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
#ifndef V8_X64_SIMULATOR_X64_H_ #define V8_X64_SIMULATOR_X64_H_ diff --git a/deps/v8/src/x64/stub-cache-x64.cc b/deps/v8/src/x64/stub-cache-x64.cc index 13e822da2..537f41235 100644 --- a/deps/v8/src/x64/stub-cache-x64.cc +++ b/deps/v8/src/x64/stub-cache-x64.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include "v8.h" @@ -420,7 +397,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm, api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE); // Jump to stub. 
- CallApiFunctionStub stub(is_store, call_data_undefined, argc); + CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc); __ TailCallStub(&stub); } @@ -495,6 +472,21 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ JumpIfNotSmi(value_reg, miss_label); } else if (representation.IsHeapObject()) { __ JumpIfSmi(value_reg, miss_label); + HeapType* field_type = descriptors->GetFieldType(descriptor); + HeapType::Iterator<Map> it = field_type->Classes(); + if (!it.Done()) { + Label do_store; + while (true) { + __ CompareMap(value_reg, it.Current()); + it.Advance(); + if (it.Done()) { + __ j(not_equal, miss_label); + break; + } + __ j(equal, &do_store, Label::kNear); + } + __ bind(&do_store); + } } else if (representation.IsDouble()) { Label do_store, heap_number; __ AllocateHeapNumber(storage_reg, scratch1, slow); @@ -639,6 +631,21 @@ void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm, __ JumpIfNotSmi(value_reg, miss_label); } else if (representation.IsHeapObject()) { __ JumpIfSmi(value_reg, miss_label); + HeapType* field_type = lookup->GetFieldType(); + HeapType::Iterator<Map> it = field_type->Classes(); + if (!it.Done()) { + Label do_store; + while (true) { + __ CompareMap(value_reg, it.Current()); + it.Advance(); + if (it.Done()) { + __ j(not_equal, miss_label); + break; + } + __ j(equal, &do_store, Label::kNear); + } + __ bind(&do_store); + } } else if (representation.IsDouble()) { // Load the double storage. if (index < 0) { @@ -741,7 +748,9 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type, int depth = 0; Handle<JSObject> current = Handle<JSObject>::null(); - if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant()); + if (type->IsConstant()) { + current = Handle<JSObject>::cast(type->AsConstant()->Value()); + } Handle<JSObject> prototype = Handle<JSObject>::null(); Handle<Map> current_map = receiver_map; Handle<Map> holder_map(holder->map()); @@ -764,7 +773,7 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type, name = factory()->InternalizeString(Handle<String>::cast(name)); } ASSERT(current.is_null() || - current->property_dictionary()->FindEntry(*name) == + current->property_dictionary()->FindEntry(name) == NameDictionary::kNotFound); GenerateDictionaryNegativeLookup(masm(), miss, reg, name, @@ -908,15 +917,17 @@ void LoadStubCompiler::GenerateLoadField(Register reg, Representation representation) { if (!reg.is(receiver())) __ movp(receiver(), reg); if (kind() == Code::LOAD_IC) { - LoadFieldStub stub(field.is_inobject(holder), + LoadFieldStub stub(isolate(), + field.is_inobject(holder), field.translate(holder), representation); - GenerateTailCall(masm(), stub.GetCode(isolate())); + GenerateTailCall(masm(), stub.GetCode()); } else { - KeyedLoadFieldStub stub(field.is_inobject(holder), + KeyedLoadFieldStub stub(isolate(), + field.is_inobject(holder), field.translate(holder), representation); - GenerateTailCall(masm(), stub.GetCode(isolate())); + GenerateTailCall(masm(), stub.GetCode()); } } @@ -961,7 +972,7 @@ void LoadStubCompiler::GenerateLoadCallback( Address getter_address = v8::ToCData<Address>(callback->getter()); __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE); - CallApiGetterStub stub; + CallApiGetterStub stub(isolate()); __ TailCallStub(&stub); } @@ -1064,17 +1075,6 @@ void LoadStubCompiler::GenerateLoadInterceptor( } -void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) { - Label success; - // Check that the object is a boolean. 
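The GenerateStoreTransition and GenerateStoreField hunks above add a map check for heap-object fields: the field's HeapType is enumerated as a list of candidate maps, the value's map is compared against each, and control reaches the store only on a match, otherwise it jumps to the miss label. A plain-C++ sketch of that control flow, with Map reduced to an opaque pointer and the candidate list passed in directly (the names here are stand-ins, not V8 API):

    #include <cstddef>
    #include <vector>

    struct Map;  // opaque stand-in for v8::internal::Map

    // Mirrors the CompareMap / j(equal, &do_store) loop: the last candidate
    // becomes the j(not_equal, miss_label) exit.
    bool FieldTypeAdmits(const Map* value_map,
                         const std::vector<const Map*>& candidates) {
      // An empty list places no constraint, matching the !it.Done() guard
      // around the emitted check.
      if (candidates.empty()) return true;
      for (size_t i = 0; i < candidates.size(); ++i) {
        if (value_map == candidates[i]) return true;  // j(equal, &do_store)
      }
      return false;                                   // j(not_equal, miss_label)
    }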
- __ Cmp(object, factory()->true_value()); - __ j(equal, &success); - __ Cmp(object, factory()->false_value()); - __ j(not_equal, miss); - __ bind(&success); -} - - Handle<Code> StoreStubCompiler::CompileStoreCallback( Handle<JSObject> object, Handle<JSObject> holder, diff --git a/deps/v8/src/zone-allocator.h b/deps/v8/src/zone-allocator.h index 7ed171390..8501c35b2 100644 --- a/deps/v8/src/zone-allocator.h +++ b/deps/v8/src/zone-allocator.h @@ -1,33 +1,12 @@ // Copyright 2014 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ZONE_ALLOCATOR_H_ #define V8_ZONE_ALLOCATOR_H_ +#include <limits> + #include "zone.h" namespace v8 { @@ -57,16 +36,14 @@ class zone_allocator { pointer address(reference x) const {return &x;} const_pointer address(const_reference x) const {return &x;} - pointer allocate(size_type count, const void* hint = 0) { - size_t size = count * sizeof(value_type); - size = RoundUp(size, kPointerSize); - return static_cast<pointer>(zone_->New(size)); + pointer allocate(size_type n, const void* hint = 0) { + return static_cast<pointer>(zone_->NewArray<value_type>( + static_cast<int>(n))); } void deallocate(pointer p, size_type) { /* noop for Zones */ } size_type max_size() const throw() { - size_type max = static_cast<size_type>(-1) / sizeof(T); - return (max > 0 ? max : 1); + return std::numeric_limits<int>::max() / sizeof(value_type); } void construct(pointer p, const T& val) { new(static_cast<void*>(p)) T(val); diff --git a/deps/v8/src/zone-containers.h b/deps/v8/src/zone-containers.h index 31672b62d..c4a1055f9 100644 --- a/deps/v8/src/zone-containers.h +++ b/deps/v8/src/zone-containers.h @@ -1,29 +1,6 @@ // Copyright 2014 the V8 project authors. All rights reserved. 
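The zone-allocator.h hunk above drops the manual RoundUp sizing in allocate() in favour of the zone's typed NewArray<value_type>() and bounds max_size() by numeric_limits<int>::max() / sizeof(value_type), matching the int-sized allocations a Zone hands out. A reduced sketch of the same STL-allocator-over-an-arena idea, using a stand-in Arena class rather than V8's Zone (Arena and arena_allocator are names invented for this example):

    #include <cstddef>
    #include <limits>
    #include <vector>

    // Stand-in for v8::internal::Zone: a region that never frees individual
    // objects; everything is reclaimed when the arena is destroyed.
    class Arena {
     public:
      void* Allocate(size_t bytes) {
        blocks_.push_back(std::vector<char>(bytes));
        return blocks_.back().data();
      }
     private:
      std::vector<std::vector<char> > blocks_;
    };

    template <typename T>
    class arena_allocator {
     public:
      typedef T value_type;
      explicit arena_allocator(Arena* arena) : arena_(arena) {}
      template <typename U>
      arena_allocator(const arena_allocator<U>& other) : arena_(other.arena()) {}

      T* allocate(size_t n) {
        // Like the patched zone_allocator::allocate, let the arena do the sizing.
        return static_cast<T*>(arena_->Allocate(n * sizeof(T)));
      }
      void deallocate(T*, size_t) { /* no-op: the arena owns the memory */ }
      size_t max_size() const {
        return std::numeric_limits<int>::max() / sizeof(value_type);
      }
      Arena* arena() const { return arena_; }
      bool operator==(const arena_allocator& other) const {
        return arena_ == other.arena_;
      }
      bool operator!=(const arena_allocator& other) const {
        return arena_ != other.arena_;
      }

     private:
      Arena* arena_;
    };

With C++11 allocator_traits this is enough to back a standard container, e.g. a std::vector<int, arena_allocator<int> > whose storage lives in the arena and disappears with it.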
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ZONE_CONTAINERS_H_ #define V8_ZONE_CONTAINERS_H_ diff --git a/deps/v8/src/zone-inl.h b/deps/v8/src/zone-inl.h index 9b82c0540..c17f33c4a 100644 --- a/deps/v8/src/zone-inl.h +++ b/deps/v8/src/zone-inl.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
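The zone-allocator.h hunk above replaces the hand-rolled size computation in allocate() with Zone::NewArray<T>() and caps max_size() at INT_MAX elements. Below is a minimal, self-contained sketch of that allocator contract in isolation; SketchZone, sketch_zone_allocator and main() are hypothetical stand-ins rather than V8 code, used only so the example compiles on its own (C++11):

#include <cstddef>
#include <limits>
#include <vector>

// Hypothetical stand-in for v8::internal::Zone: hands out raw arrays and
// releases everything at once when destroyed. (The real Zone also takes
// care of alignment; new char[] is good enough for this sketch.)
class SketchZone {
 public:
  ~SketchZone() {
    for (std::size_t i = 0; i < blocks_.size(); ++i) delete[] blocks_[i];
  }
  template <typename T>
  T* NewArray(int length) {
    char* block = new char[sizeof(T) * static_cast<std::size_t>(length)];
    blocks_.push_back(block);
    return reinterpret_cast<T*>(block);
  }
 private:
  std::vector<char*> blocks_;
};

// STL-style allocator mirroring the reworked zone_allocator above.
template <typename T>
class sketch_zone_allocator {
 public:
  typedef T value_type;
  explicit sketch_zone_allocator(SketchZone* zone) : zone_(zone) {}
  template <typename U>
  sketch_zone_allocator(const sketch_zone_allocator<U>& other)
      : zone_(other.zone_) {}
  T* allocate(std::size_t n) {
    // Mirrors the new allocate(): forward the request to the zone.
    return zone_->NewArray<T>(static_cast<int>(n));
  }
  void deallocate(T*, std::size_t) {}  // no-op: the zone owns the memory
  std::size_t max_size() const {
    // Mirrors the new max_size(): bounded by int, not size_type(-1).
    return std::numeric_limits<int>::max() / sizeof(T);
  }
  SketchZone* zone_;
};

template <typename T, typename U>
bool operator==(const sketch_zone_allocator<T>& a,
                const sketch_zone_allocator<U>& b) {
  return a.zone_ == b.zone_;
}
template <typename T, typename U>
bool operator!=(const sketch_zone_allocator<T>& a,
                const sketch_zone_allocator<U>& b) {
  return !(a == b);
}

int main() {
  SketchZone zone;
  sketch_zone_allocator<int> alloc(&zone);
  std::vector<int, sketch_zone_allocator<int> > numbers(alloc);
  for (int i = 0; i < 1000; ++i) numbers.push_back(i);
  return numbers.back() == 999 ? 0 : 1;  // all storage freed with the zone
}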
#ifndef V8_ZONE_INL_H_ #define V8_ZONE_INL_H_ @@ -39,7 +16,6 @@ #include "counters.h" #include "isolate.h" #include "utils.h" -#include "v8-counters.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/zone.cc b/deps/v8/src/zone.cc index 4f9137129..49efc5a74 100644 --- a/deps/v8/src/zone.cc +++ b/deps/v8/src/zone.cc @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #include <string.h> diff --git a/deps/v8/src/zone.h b/deps/v8/src/zone.h index 83421b396..573e13e1d 100644 --- a/deps/v8/src/zone.h +++ b/deps/v8/src/zone.h @@ -1,29 +1,6 @@ // Copyright 2012 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. #ifndef V8_ZONE_H_ #define V8_ZONE_H_ diff --git a/deps/v8/test/cctest/cctest.gyp b/deps/v8/test/cctest/cctest.gyp index ec5b08dd2..745b4c51d 100644 --- a/deps/v8/test/cctest/cctest.gyp +++ b/deps/v8/test/cctest/cctest.gyp @@ -41,7 +41,7 @@ 'include_dirs': [ '../../src', ], - 'sources': [ + 'sources': [ ### gcmole(all) ### '<(generated_file)', 'cctest.cc', 'gay-fixed.cc', @@ -94,6 +94,7 @@ 'test-mementos.cc', 'test-mutex.cc', 'test-object-observe.cc', + 'test-ordered-hash-table.cc', 'test-parsing.cc', 'test-platform.cc', 'test-platform-tls.cc', @@ -124,7 +125,7 @@ ], 'conditions': [ ['v8_target_arch=="ia32"', { - 'sources': [ + 'sources': [ ### gcmole(arch:ia32) ### 'test-assembler-ia32.cc', 'test-code-stubs.cc', 'test-code-stubs-ia32.cc', @@ -135,7 +136,7 @@ ], }], ['v8_target_arch=="x64"', { - 'sources': [ + 'sources': [ ### gcmole(arch:x64) ### 'test-assembler-x64.cc', 'test-code-stubs.cc', 'test-code-stubs-x64.cc', @@ -146,7 +147,7 @@ ], }], ['v8_target_arch=="arm"', { - 'sources': [ + 'sources': [ ### gcmole(arch:arm) ### 'test-assembler-arm.cc', 'test-code-stubs.cc', 'test-code-stubs-arm.cc', @@ -155,7 +156,7 @@ ], }], ['v8_target_arch=="arm64"', { - 'sources': [ + 'sources': [ ### gcmole(arch:arm64) ### 'test-utils-arm64.cc', 'test-assembler-arm64.cc', 'test-code-stubs.cc', @@ -167,7 +168,7 @@ ], }], ['v8_target_arch=="mipsel"', { - 'sources': [ + 'sources': [ ### gcmole(arch:mipsel) ### 'test-assembler-mips.cc', 'test-code-stubs.cc', 'test-code-stubs-mips.cc', diff --git a/deps/v8/test/cctest/cctest.h b/deps/v8/test/cctest/cctest.h index 635983523..36e1b96eb 100644 --- a/deps/v8/test/cctest/cctest.h +++ b/deps/v8/test/cctest/cctest.h @@ -80,6 +80,20 @@ typedef v8::internal::EnumSet<CcTestExtensionIds> CcTestExtensionFlags; #undef DEFINE_EXTENSION_FLAG +// Use this to expose protected methods in i::Heap. +class TestHeap : public i::Heap { + public: + using i::Heap::AllocateArgumentsObject; + using i::Heap::AllocateByteArray; + using i::Heap::AllocateFixedArray; + using i::Heap::AllocateHeapNumber; + using i::Heap::AllocateJSObject; + using i::Heap::AllocateJSObjectFromMap; + using i::Heap::AllocateMap; + using i::Heap::CopyCode; +}; + + class CcTest { public: typedef void (TestFunction)(); @@ -107,6 +121,10 @@ class CcTest { return i_isolate()->heap(); } + static TestHeap* test_heap() { + return reinterpret_cast<TestHeap*>(i_isolate()->heap()); + } + static v8::Local<v8::Object> global() { return isolate()->GetCurrentContext()->Global(); } @@ -346,19 +364,13 @@ static inline v8::Local<v8::Value> CompileRun(v8::Local<v8::String> source) { static inline v8::Local<v8::Value> PreCompileCompileRun(const char* source) { + // Compile once just to get the preparse data, then compile the second time + // using the data. 
v8::Isolate* isolate = v8::Isolate::GetCurrent(); - v8::Local<v8::String> source_string = - v8::String::NewFromUtf8(isolate, source); - v8::ScriptData* preparse = v8::ScriptData::PreCompile(source_string); - v8::ScriptCompiler::Source script_source( - source_string, new v8::ScriptCompiler::CachedData( - reinterpret_cast<const uint8_t*>(preparse->Data()), - preparse->Length())); - v8::Local<v8::Script> script = - v8::ScriptCompiler::Compile(isolate, &script_source); - v8::Local<v8::Value> result = script->Run(); - delete preparse; - return result; + v8::ScriptCompiler::Source script_source(v8_str(source)); + v8::ScriptCompiler::Compile(isolate, &script_source, + v8::ScriptCompiler::kProduceDataToCache); + return v8::ScriptCompiler::Compile(isolate, &script_source)->Run(); } @@ -403,8 +415,10 @@ static inline void SimulateFullSpace(v8::internal::NewSpace* space) { int new_linear_size = static_cast<int>( *space->allocation_limit_address() - *space->allocation_top_address()); if (new_linear_size == 0) return; - v8::internal::MaybeObject* maybe = space->AllocateRaw(new_linear_size); - v8::internal::FreeListNode* node = v8::internal::FreeListNode::cast(maybe); + v8::internal::AllocationResult allocation = + space->AllocateRaw(new_linear_size); + v8::internal::FreeListNode* node = + v8::internal::FreeListNode::cast(allocation.ToObjectChecked()); node->set_size(space->heap(), new_linear_size); } diff --git a/deps/v8/test/cctest/cctest.status b/deps/v8/test/cctest/cctest.status index 2f09743e2..fb73f7a6d 100644 --- a/deps/v8/test/cctest/cctest.status +++ b/deps/v8/test/cctest/cctest.status @@ -65,8 +65,16 @@ # are actually 13 * 38 * 5 * 128 = 316160 individual tests hidden here. 'test-parsing/ParserSync': [PASS, NO_VARIANTS], + # This tests only the type system, so there is no point in running several + # variants. + 'test-types/*': [PASS, NO_VARIANTS], + # BUG(2999). 'test-cpu-profiler/CollectCpuProfile': [PASS, FLAKY], + # BUG(3287). + 'test-cpu-profiler/SampleWhenFrameIsNotSetup': [PASS, FLAKY], + # BUG(3308). + 'test-cpu-profiler/JsNativeJsRuntimeJsSample': [PASS, FLAKY], ############################################################################ # Slow tests. @@ -86,10 +94,13 @@ 'test-cpu-profiler/CollectCpuProfile': [PASS, FAIL], # BUG(v8:3154). - 'test-heap/ReleaseOverReservedPages': [PASS, ['mode == debug', FAIL]], + 'test-heap/ReleaseOverReservedPages': [PASS, FAIL], # BUG(v8:3155). 'test-strings/AsciiArrayJoin': [PASS, ['mode == debug', FAIL]], + + # BUG(v8:3247). + 'test-mark-compact/NoPromotion': [SKIP], }], # 'arch == arm64' ['arch == arm64 and simulator_run == True', { @@ -119,11 +130,18 @@ }], # 'asan == True' ############################################################################## -# This should be 'nosnap == True': issue 3216 to add 'nosnap'. -[ALWAYS, { +['no_snap == True', { # BUG(3215) 'test-lockers/MultithreadedParallelIsolates': [PASS, FAIL], -}], # 'nosnap == True' +}], # 'no_snap == True' + +############################################################################## +# TODO(machenbach): Fix application of '*'. Nosnap windows needs a separate +# section to not overwrite the expectations for TestThatAlwaysFails. +['no_snap == True and system == windows', { + # Windows doesn't support nosnap mode. + 'test-serialize/*': [SKIP], +}], # 'no_snap == True and system == windows' ############################################################################## ['system == windows', { @@ -137,6 +155,9 @@ # BUG(3005). 'test-alloc/CodeRange': [PASS, FAIL], + + # BUG(3215). 
Crashes on windows. + 'test-lockers/MultithreadedParallelIsolates': [SKIP], }], # 'system == windows' ############################################################################## @@ -169,7 +190,7 @@ }], # 'arch == arm' ############################################################################## -['arch == mipsel', { +['arch == mipsel or arch == mips', { # BUG(2657): Test sometimes times out on MIPS simulator. 'test-thread-termination/TerminateMultipleV8ThreadsDefaultIsolate': [PASS, TIMEOUT], @@ -179,7 +200,7 @@ 'test-serialize/DeserializeFromSecondSerializationAndRunScript2': [SKIP], 'test-serialize/DeserializeAndRunScript2': [SKIP], 'test-serialize/DeserializeFromSecondSerialization': [SKIP], -}], # 'arch == mipsel' +}], # 'arch == mipsel or arch == mips' ############################################################################## ['arch == android_arm or arch == android_ia32', { @@ -226,5 +247,79 @@ # BUG(3150). 'test-api/PreCompileInvalidPreparseDataError': [SKIP], + + 'test-types/Convert' : [SKIP], + 'test-symbols/Create' : [SKIP], + 'test-parsing/ParserSync' : [SKIP], + 'test-parsing/ErrorsEvalAndArguments' : [SKIP], + 'test-parsing/ErrorsFutureStrictReservedWords' : [SKIP], + 'test-parsing/ErrorsReservedWords' : [SKIP], + 'test-parsing/ErrorsYieldStrict' : [SKIP], + 'test-parsing/ErrorsNotAnIdentifierName' : [SKIP], + 'test-parsing/FunctionDeclaresItselfStrict' : [SKIP], + 'test-parsing/ErrorsObjectLiteralChecking' : [SKIP], + 'test-parsing/InvalidLeftHandSide' : [SKIP], + 'test-heap/GarbageCollection' : [SKIP], + 'test-heap/GlobalHandles' : [SKIP], + 'test-heap/WeakGlobalHandlesScavenge' : [SKIP], + 'test-heap/DeleteWeakGlobalHandle' : [SKIP], + 'test-heap/GrowAndShrinkNewSpace' : [SKIP], + 'test-heap/OptimizedAllocationAlwaysInNewSpace' : [SKIP], + 'test-heap/OptimizedPretenuringAllocationFolding' : [SKIP], + 'test-heap/OptimizedPretenuringObjectArrayLiterals' : [SKIP], + 'test-heap/OptimizedPretenuringAllocationFoldingBlocks' : [SKIP], + 'test-heap/OptimizedPretenuringMixedInObjectProperties' : [SKIP], + 'test-heap/OptimizedPretenuringDoubleArrayProperties' : [SKIP], + 'test-heap/OptimizedPretenuringdoubleArrayLiterals' : [SKIP], + 'test-heap/OptimizedPretenuringNestedMixedArrayLiterals' : [SKIP], + 'test-heap/OptimizedPretenuringNestedObjectLiterals' : [SKIP], + 'test-heap/OptimizedPretenuringNestedDoubleLiterals' : [SKIP], + 'test-heap/Regress169928' : [SKIP], + 'test-decls/Unknown' : [SKIP], + 'test-decls/Present' : [SKIP], + 'test-decls/Absent' : [SKIP], + 'test-decls/Appearing' : [SKIP], + 'test-decls/Reappearing' : [SKIP], + 'test-decls/ExistsInPrototype' : [SKIP], + 'test-decls/AbsentInPrototype' : [SKIP], + 'test-decls/ExistsInHiddenPrototype' : [SKIP], + 'test-debug/ConditionalScriptBreakPoint' : [SKIP], + 'test-debug/DebugEvaluate' : [SKIP], + 'test-debug/ConditionalBreakpointWithCodeGenerationDisallowed' : [SKIP], + 'test-debug/DebugEvaluateWithCodeGenerationDisallowed' : [SKIP], + 'test-debug/DebugBreak' : [SKIP], + 'test-debug/ThreadedDebugging' : [SKIP], + 'test-debug/RecursiveBreakpoints' : [SKIP], + 'test-dictionary/HashMap' : [SKIP], + 'test-debug/Backtrace' : [SKIP], + 'test-debug/DebugBreakLoop' : [SKIP], + 'test-constantpool/ConstantPool' : [SKIP], + 'test-compiler/GetScriptLineNumber' : [SKIP], + 'test-api/ScriptMakingExternalString' : [SKIP], + 'test-api/ScriptMakingExternalAsciiString' : [SKIP], + 'test-api/MakingExternalStringConditions' : [SKIP], + 'test-api/MakingExternalAsciiStringConditions' : [SKIP], + 
'test-api/MakingExternalUnalignedAsciiString' : [SKIP], + 'test-api/IndexedInterceptorUnboxedDoubleWithIndexedAccessor' : [SKIP], + 'test-api/IndependentWeakHandle' : [SKIP], + 'test-api/GCFromWeakCallbacks' : [SKIP], + 'test-api/IndependentHandleRevival' : [SKIP], + 'test-api/StringWrite' : [SKIP], + 'test-api/Threading3' : [SKIP], + 'test-api/Threading4' : [SKIP], + 'test-api/Threading2' : [SKIP], + 'test-api/FixedFloat32Array' : [SKIP], + 'test-api/FixedFloat64Array' : [SKIP], + 'test-api/ExternalFloat32Array' : [SKIP], + 'test-api/ExternalFloat64Array' : [SKIP], + 'test-api/ExternalArrays' : [SKIP], + 'test-api/Float32Array' : [SKIP], + 'test-api/Float64Array' : [SKIP], + 'test-api/Regress2333' : [SKIP], + 'test-alloc/StressHandles' : [SKIP], + 'test-alloc/StressJS' : [SKIP], + 'test-accessors/HandleScopePop' : [SKIP], + 'test-accessors/Gc' : [SKIP], + }], # 'arch == nacl_ia32 or arch == nacl_x64' ] diff --git a/deps/v8/test/cctest/profiler-extension.cc b/deps/v8/test/cctest/profiler-extension.cc index 80d9f9041..1fdd1ba24 100644 --- a/deps/v8/test/cctest/profiler-extension.cc +++ b/deps/v8/test/cctest/profiler-extension.cc @@ -34,7 +34,7 @@ namespace v8 { namespace internal { -const v8::CpuProfile* ProfilerExtension::last_profile = NULL; +v8::CpuProfile* ProfilerExtension::last_profile = NULL; const char* ProfilerExtension::kSource = "native function startProfiling();" "native function stopProfiling();"; @@ -58,7 +58,7 @@ void ProfilerExtension::StartProfiling( const v8::FunctionCallbackInfo<v8::Value>& args) { last_profile = NULL; v8::CpuProfiler* cpu_profiler = args.GetIsolate()->GetCpuProfiler(); - cpu_profiler->StartCpuProfiling((args.Length() > 0) + cpu_profiler->StartProfiling((args.Length() > 0) ? args[0].As<v8::String>() : v8::String::Empty(args.GetIsolate())); } @@ -67,7 +67,7 @@ void ProfilerExtension::StartProfiling( void ProfilerExtension::StopProfiling( const v8::FunctionCallbackInfo<v8::Value>& args) { v8::CpuProfiler* cpu_profiler = args.GetIsolate()->GetCpuProfiler(); - last_profile = cpu_profiler->StopCpuProfiling((args.Length() > 0) + last_profile = cpu_profiler->StopProfiling((args.Length() > 0) ? args[0].As<v8::String>() : v8::String::Empty(args.GetIsolate())); } diff --git a/deps/v8/test/cctest/profiler-extension.h b/deps/v8/test/cctest/profiler-extension.h index 392a7efbc..c26a29c39 100644 --- a/deps/v8/test/cctest/profiler-extension.h +++ b/deps/v8/test/cctest/profiler-extension.h @@ -43,7 +43,7 @@ class ProfilerExtension : public v8::Extension { v8::Handle<v8::String> name); static void StartProfiling(const v8::FunctionCallbackInfo<v8::Value>& args); static void StopProfiling(const v8::FunctionCallbackInfo<v8::Value>& args); - static const v8::CpuProfile* last_profile; + static v8::CpuProfile* last_profile; private: static const char* kSource; }; diff --git a/deps/v8/test/cctest/test-alloc.cc b/deps/v8/test/cctest/test-alloc.cc index 0d4ab886a..7a213ae4b 100644 --- a/deps/v8/test/cctest/test-alloc.cc +++ b/deps/v8/test/cctest/test-alloc.cc @@ -27,6 +27,7 @@ #include "v8.h" #include "accessors.h" +#include "api.h" #include "cctest.h" @@ -34,32 +35,33 @@ using namespace v8::internal; -static MaybeObject* AllocateAfterFailures() { +static AllocationResult AllocateAfterFailures() { static int attempts = 0; - if (++attempts < 3) return Failure::RetryAfterGC(); - Heap* heap = CcTest::heap(); + + if (++attempts < 3) return AllocationResult::Retry(); + TestHeap* heap = CcTest::test_heap(); // New space. 
SimulateFullSpace(heap->new_space()); - CHECK(!heap->AllocateByteArray(100)->IsFailure()); - CHECK(!heap->AllocateFixedArray(100, NOT_TENURED)->IsFailure()); + heap->AllocateByteArray(100).ToObjectChecked(); + heap->AllocateFixedArray(100, NOT_TENURED).ToObjectChecked(); // Make sure we can allocate through optimized allocation functions // for specific kinds. - CHECK(!heap->AllocateFixedArray(100)->IsFailure()); - CHECK(!heap->AllocateHeapNumber(0.42)->IsFailure()); - CHECK(!heap->AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure()); + heap->AllocateFixedArray(100).ToObjectChecked(); + heap->AllocateHeapNumber(0.42).ToObjectChecked(); + heap->AllocateArgumentsObject(Smi::FromInt(87), 10).ToObjectChecked(); Object* object = heap->AllocateJSObject( - *CcTest::i_isolate()->object_function())->ToObjectChecked(); - CHECK(!heap->CopyJSObject(JSObject::cast(object))->IsFailure()); + *CcTest::i_isolate()->object_function()).ToObjectChecked(); + heap->CopyJSObject(JSObject::cast(object)).ToObjectChecked(); // Old data space. SimulateFullSpace(heap->old_data_space()); - CHECK(!heap->AllocateRawOneByteString(100, TENURED)->IsFailure()); + heap->AllocateByteArray(100, TENURED).ToObjectChecked(); // Old pointer space. SimulateFullSpace(heap->old_pointer_space()); - CHECK(!heap->AllocateFixedArray(10000, TENURED)->IsFailure()); + heap->AllocateFixedArray(10000, TENURED).ToObjectChecked(); // Large object space. static const int kLargeObjectSpaceFillerLength = 300000; @@ -67,22 +69,22 @@ static MaybeObject* AllocateAfterFailures() { kLargeObjectSpaceFillerLength); ASSERT(kLargeObjectSpaceFillerSize > heap->old_pointer_space()->AreaSize()); while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) { - CHECK(!heap->AllocateFixedArray(kLargeObjectSpaceFillerLength, TENURED)-> - IsFailure()); + heap->AllocateFixedArray( + kLargeObjectSpaceFillerLength, TENURED).ToObjectChecked(); } - CHECK(!heap->AllocateFixedArray(kLargeObjectSpaceFillerLength, TENURED)-> - IsFailure()); + heap->AllocateFixedArray( + kLargeObjectSpaceFillerLength, TENURED).ToObjectChecked(); // Map space. SimulateFullSpace(heap->map_space()); int instance_size = JSObject::kHeaderSize; - CHECK(!heap->AllocateMap(JS_OBJECT_TYPE, instance_size)->IsFailure()); + heap->AllocateMap(JS_OBJECT_TYPE, instance_size).ToObjectChecked(); // Test that we can allocate in old pointer space and code space. SimulateFullSpace(heap->code_space()); - CHECK(!heap->AllocateFixedArray(100, TENURED)->IsFailure()); - CHECK(!heap->CopyCode(CcTest::i_isolate()->builtins()->builtin( - Builtins::kIllegal))->IsFailure()); + heap->AllocateFixedArray(100, TENURED).ToObjectChecked(); + heap->CopyCode(CcTest::i_isolate()->builtins()->builtin( + Builtins::kIllegal)).ToObjectChecked(); // Return success. 
return Smi::FromInt(42); @@ -104,16 +106,29 @@ TEST(StressHandles) { } -static MaybeObject* TestAccessorGet(Isolate* isolate, Object* object, void*) { - return AllocateAfterFailures(); +void TestGetter( + v8::Local<v8::String> name, + const v8::PropertyCallbackInfo<v8::Value>& info) { + i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate()); + HandleScope scope(isolate); + info.GetReturnValue().Set(v8::Utils::ToLocal(Test())); } -const AccessorDescriptor kDescriptor = { - TestAccessorGet, - 0, - 0 -}; +void TestSetter( + v8::Local<v8::String> name, + v8::Local<v8::Value> value, + const v8::PropertyCallbackInfo<void>& info) { + UNREACHABLE(); +} + + +Handle<AccessorInfo> TestAccessorInfo( + Isolate* isolate, PropertyAttributes attributes) { + Handle<String> name = isolate->factory()->NewStringFromStaticAscii("get"); + return Accessors::MakeAccessor(isolate, name, &TestGetter, &TestSetter, + attributes); +} TEST(StressJS) { @@ -122,8 +137,8 @@ TEST(StressJS) { v8::HandleScope scope(CcTest::isolate()); v8::Handle<v8::Context> env = v8::Context::New(CcTest::isolate()); env->Enter(); - Handle<JSFunction> function = - factory->NewFunction(factory->function_string(), factory->null_value()); + Handle<JSFunction> function = factory->NewFunctionWithPrototype( + factory->function_string(), factory->null_value()); // Force the creation of an initial map and set the code to // something empty. factory->NewJSObject(function); @@ -132,20 +147,15 @@ TEST(StressJS) { // Patch the map to have an accessor for "get". Handle<Map> map(function->initial_map()); Handle<DescriptorArray> instance_descriptors(map->instance_descriptors()); - Handle<Foreign> foreign = factory->NewForeign(&kDescriptor); - Handle<String> name = - factory->NewStringFromAscii(Vector<const char>("get", 3)); ASSERT(instance_descriptors->IsEmpty()); - Handle<DescriptorArray> new_descriptors = factory->NewDescriptorArray(0, 1); - - v8::internal::DescriptorArray::WhitenessWitness witness(*new_descriptors); - map->set_instance_descriptors(*new_descriptors); + PropertyAttributes attrs = static_cast<PropertyAttributes>(0); + Handle<AccessorInfo> foreign = TestAccessorInfo(isolate, attrs); + Map::EnsureDescriptorSlack(map, 1); - CallbacksDescriptor d(*name, - *foreign, - static_cast<PropertyAttributes>(0)); - map->AppendDescriptor(&d, witness); + CallbacksDescriptor d(Handle<Name>(Name::cast(foreign->name())), + foreign, attrs); + map->AppendDescriptor(&d); // Add the Foo constructor the global object. 
env->Global()->Set(v8::String::NewFromUtf8(CcTest::isolate(), "Foo"), diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc index 5ee43d3e0..d8fa648bf 100644 --- a/deps/v8/test/cctest/test-api.cc +++ b/deps/v8/test/cctest/test-api.cc @@ -93,7 +93,7 @@ void RunWithProfiler(void (*test)()) { v8::String::NewFromUtf8(env->GetIsolate(), "my_profile1"); v8::CpuProfiler* cpu_profiler = env->GetIsolate()->GetCpuProfiler(); - cpu_profiler->StartCpuProfiling(profile_name); + cpu_profiler->StartProfiling(profile_name); (*test)(); reinterpret_cast<i::CpuProfiler*>(cpu_profiler)->DeleteAllProfiles(); } @@ -1917,14 +1917,14 @@ static void EchoNamedProperty(Local<String> name, void SimpleAccessorGetter(Local<String> name, const v8::PropertyCallbackInfo<v8::Value>& info) { - Handle<Object> self = info.This(); + Handle<Object> self = Handle<Object>::Cast(info.This()); info.GetReturnValue().Set( self->Get(String::Concat(v8_str("accessor_"), name))); } void SimpleAccessorSetter(Local<String> name, Local<Value> value, const v8::PropertyCallbackInfo<void>& info) { - Handle<Object> self = info.This(); + Handle<Object> self = Handle<Object>::Cast(info.This()); self->Set(String::Concat(v8_str("accessor_"), name), value); } @@ -1947,7 +1947,7 @@ void InterceptorGetter(Local<String> name, for (i = 0; name_str[i] && prefix[i]; ++i) { if (name_str[i] != prefix[i]) return; } - Handle<Object> self = info.This(); + Handle<Object> self = Handle<Object>::Cast(info.This()); info.GetReturnValue().Set(self->GetHiddenValue(v8_str(name_str + i))); } @@ -1966,7 +1966,7 @@ void InterceptorSetter(Local<String> name, if (!prefix[i]) return; if (value->IsInt32() && value->Int32Value() < 10000) { - Handle<Object> self = info.This(); + Handle<Object> self = Handle<Object>::Cast(info.This()); self->SetHiddenValue(name, value); info.GetReturnValue().Set(value); } @@ -2748,6 +2748,25 @@ THREADED_TEST(IdentityHash) { } +THREADED_TEST(GlobalProxyIdentityHash) { + LocalContext env; + v8::Isolate* isolate = env->GetIsolate(); + v8::HandleScope scope(isolate); + Handle<Object> global_proxy = env->Global(); + int hash1 = global_proxy->GetIdentityHash(); + // Hash should be retained after being detached. + env->DetachGlobal(); + int hash2 = global_proxy->GetIdentityHash(); + CHECK_EQ(hash1, hash2); + { + // Re-attach global proxy to a new context, hash should stay the same. 
+ LocalContext env2(NULL, Handle<ObjectTemplate>(), global_proxy); + int hash3 = global_proxy->GetIdentityHash(); + CHECK_EQ(hash1, hash3); + } +} + + THREADED_TEST(SymbolProperties) { i::FLAG_harmony_symbols = true; @@ -3498,22 +3517,23 @@ THREADED_TEST(UniquePersistent) { template<typename K, typename V> class WeakStdMapTraits : public v8::StdMapTraits<K, V> { public: - typedef typename v8::DefaultPersistentValueMapTraits<K, V>::Impl Impl; - static const bool kIsWeak = true; + typedef typename v8::PersistentValueMap<K, V, WeakStdMapTraits<K, V> > + MapType; + static const v8::PersistentContainerCallbackType kCallbackType = v8::kWeak; struct WeakCallbackDataType { - Impl* impl; + MapType* map; K key; }; static WeakCallbackDataType* WeakCallbackParameter( - Impl* impl, const K& key, Local<V> value) { + MapType* map, const K& key, Local<V> value) { WeakCallbackDataType* data = new WeakCallbackDataType; - data->impl = impl; + data->map = map; data->key = key; return data; } - static Impl* ImplFromWeakCallbackData( + static MapType* MapFromWeakCallbackData( const v8::WeakCallbackData<V, WeakCallbackDataType>& data) { - return data.GetParameter()->impl; + return data.GetParameter()->map; } static K KeyFromWeakCallbackData( const v8::WeakCallbackData<V, WeakCallbackDataType>& data) { @@ -3523,7 +3543,7 @@ class WeakStdMapTraits : public v8::StdMapTraits<K, V> { delete data; } static void Dispose(v8::Isolate* isolate, v8::UniquePersistent<V> value, - Impl* impl, K key) { } + K key) { } }; @@ -3545,6 +3565,10 @@ static void TestPersistentValueMap() { CHECK_EQ(1, static_cast<int>(map.Size())); obj = map.Get(7); CHECK_EQ(expected, obj); + { + typename Map::PersistentValueReference ref = map.GetReference(7); + CHECK_EQ(expected, ref.NewLocal(isolate)); + } v8::UniquePersistent<v8::Object> removed = map.Remove(7); CHECK_EQ(0, static_cast<int>(map.Size())); CHECK(expected == removed); @@ -3554,6 +3578,15 @@ static void TestPersistentValueMap() { CHECK_EQ(1, static_cast<int>(map.Size())); map.Set(8, expected); CHECK_EQ(1, static_cast<int>(map.Size())); + { + typename Map::PersistentValueReference ref; + Local<v8::Object> expected2 = v8::Object::New(isolate); + removed = map.Set(8, + v8::UniquePersistent<v8::Object>(isolate, expected2), &ref); + CHECK_EQ(1, static_cast<int>(map.Size())); + CHECK(expected == removed); + CHECK_EQ(expected2, ref.NewLocal(isolate)); + } } CHECK_EQ(initial_handle_count + 1, global_handles->global_handles_count()); if (map.IsWeak()) { @@ -3572,12 +3605,55 @@ TEST(PersistentValueMap) { TestPersistentValueMap<v8::StdPersistentValueMap<int, v8::Object> >(); // Custom traits with weak callbacks: - typedef v8::StdPersistentValueMap<int, v8::Object, + typedef v8::PersistentValueMap<int, v8::Object, WeakStdMapTraits<int, v8::Object> > WeakPersistentValueMap; TestPersistentValueMap<WeakPersistentValueMap>(); } +TEST(PersistentValueVector) { + LocalContext env; + v8::Isolate* isolate = env->GetIsolate(); + v8::internal::GlobalHandles* global_handles = + reinterpret_cast<v8::internal::Isolate*>(isolate)->global_handles(); + int handle_count = global_handles->global_handles_count(); + HandleScope scope(isolate); + + v8::PersistentValueVector<v8::Object> vector(isolate); + + Local<v8::Object> obj1 = v8::Object::New(isolate); + Local<v8::Object> obj2 = v8::Object::New(isolate); + v8::UniquePersistent<v8::Object> obj3(isolate, v8::Object::New(isolate)); + + CHECK(vector.IsEmpty()); + CHECK_EQ(0, static_cast<int>(vector.Size())); + + vector.ReserveCapacity(3); + CHECK(vector.IsEmpty()); + + 
vector.Append(obj1); + vector.Append(obj2); + vector.Append(obj1); + vector.Append(obj3.Pass()); + vector.Append(obj1); + + CHECK(!vector.IsEmpty()); + CHECK_EQ(5, static_cast<int>(vector.Size())); + CHECK(obj3.IsEmpty()); + CHECK_EQ(obj1, vector.Get(0)); + CHECK_EQ(obj1, vector.Get(2)); + CHECK_EQ(obj1, vector.Get(4)); + CHECK_EQ(obj2, vector.Get(1)); + + CHECK_EQ(5 + handle_count, global_handles->global_handles_count()); + + vector.Clear(); + CHECK(vector.IsEmpty()); + CHECK_EQ(0, static_cast<int>(vector.Size())); + CHECK_EQ(handle_count, global_handles->global_handles_count()); +} + + THREADED_TEST(GlobalHandleUpcast) { v8::Isolate* isolate = CcTest::isolate(); v8::HandleScope scope(isolate); @@ -4038,7 +4114,7 @@ TEST(ApiObjectGroupsCycleForScavenger) { v8::internal::Heap* heap = reinterpret_cast<v8::internal::Isolate*>( iso)->heap(); - heap->CollectGarbage(i::NEW_SPACE); + heap->CollectAllGarbage(i::Heap::kNoGCFlags); // All objects should be alive. CHECK_EQ(0, counter.NumberOfWeakCalls()); @@ -4070,7 +4146,7 @@ TEST(ApiObjectGroupsCycleForScavenger) { v8_str("x"), Local<Value>::New(iso, g1s1.handle)); } - heap->CollectGarbage(i::NEW_SPACE); + heap->CollectAllGarbage(i::Heap::kNoGCFlags); // All objects should be gone. 7 global handles in total. CHECK_EQ(7, counter.NumberOfWeakCalls()); @@ -7532,7 +7608,7 @@ int GetUtf8Length(Handle<String> str) { int len = str->Utf8Length(); if (len < 0) { i::Handle<i::String> istr(v8::Utils::OpenHandle(*str)); - i::FlattenString(istr); + i::String::Flatten(istr); len = str->Utf8Length(); } return len; @@ -8148,10 +8224,9 @@ static void YGetter(Local<String> name, static void YSetter(Local<String> name, Local<Value> value, const v8::PropertyCallbackInfo<void>& info) { - if (info.This()->Has(name)) { - info.This()->Delete(name); - } - info.This()->Set(name, value); + Local<Object> this_obj = Local<Object>::Cast(info.This()); + if (this_obj->Has(name)) this_obj->Delete(name); + this_obj->Set(name, value); } @@ -8195,34 +8270,6 @@ THREADED_TEST(TypeSwitch) { } -// For use within the TestSecurityHandler() test. -static bool g_security_callback_result = false; -static bool NamedSecurityTestCallback(Local<v8::Object> global, - Local<Value> name, - v8::AccessType type, - Local<Value> data) { - // Always allow read access. - if (type == v8::ACCESS_GET) - return true; - - // Sometimes allow other access. - return g_security_callback_result; -} - - -static bool IndexedSecurityTestCallback(Local<v8::Object> global, - uint32_t key, - v8::AccessType type, - Local<Value> data) { - // Always allow read access. - if (type == v8::ACCESS_GET) - return true; - - // Sometimes allow other access. - return g_security_callback_result; -} - - static int trouble_nesting = 0; static void TroubleCallback(const v8::FunctionCallbackInfo<v8::Value>& args) { ApiTestFuzzer::Fuzz(); @@ -8349,6 +8396,36 @@ TEST(TryCatchFinallyUsingTryCatchHandler) { } +// For use within the TestSecurityHandler() test. +static bool g_security_callback_result = false; +static bool NamedSecurityTestCallback(Local<v8::Object> global, + Local<Value> name, + v8::AccessType type, + Local<Value> data) { + printf("a\n"); + // Always allow read access. + if (type == v8::ACCESS_GET) + return true; + + // Sometimes allow other access. + return g_security_callback_result; +} + + +static bool IndexedSecurityTestCallback(Local<v8::Object> global, + uint32_t key, + v8::AccessType type, + Local<Value> data) { + printf("b\n"); + // Always allow read access. 
+ if (type == v8::ACCESS_GET) + return true; + + // Sometimes allow other access. + return g_security_callback_result; +} + + // SecurityHandler can't be run twice TEST(SecurityHandler) { v8::Isolate* isolate = CcTest::isolate(); @@ -8520,6 +8597,61 @@ THREADED_TEST(SecurityChecksForPrototypeChain) { } +static bool named_security_check_with_gc_called; + +static bool NamedSecurityCallbackWithGC(Local<v8::Object> global, + Local<Value> name, + v8::AccessType type, + Local<Value> data) { + CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags); + named_security_check_with_gc_called = true; + return true; +} + + +static bool indexed_security_check_with_gc_called; + +static bool IndexedSecurityTestCallbackWithGC(Local<v8::Object> global, + uint32_t key, + v8::AccessType type, + Local<Value> data) { + CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags); + indexed_security_check_with_gc_called = true; + return true; +} + + +TEST(SecurityTestGCAllowed) { + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope handle_scope(isolate); + v8::Handle<v8::ObjectTemplate> object_template = + v8::ObjectTemplate::New(isolate); + object_template->SetAccessCheckCallbacks(NamedSecurityCallbackWithGC, + IndexedSecurityTestCallbackWithGC); + + v8::Handle<Context> context = Context::New(isolate); + v8::Context::Scope context_scope(context); + + context->Global()->Set(v8_str("obj"), object_template->NewInstance()); + + named_security_check_with_gc_called = false; + CompileRun("obj.foo = new String(1001);"); + CHECK(named_security_check_with_gc_called); + + indexed_security_check_with_gc_called = false; + CompileRun("obj[0] = new String(1002);"); + CHECK(indexed_security_check_with_gc_called); + + named_security_check_with_gc_called = false; + CHECK(CompileRun("obj.foo")->ToString()->Equals(v8_str("1001"))); + CHECK(named_security_check_with_gc_called); + + indexed_security_check_with_gc_called = false; + CHECK(CompileRun("obj[0]")->ToString()->Equals(v8_str("1002"))); + CHECK(indexed_security_check_with_gc_called); +} + + THREADED_TEST(CrossDomainDelete) { LocalContext env1; v8::HandleScope handle_scope(env1->GetIsolate()); @@ -9282,7 +9414,6 @@ TEST(AccessControlES5) { CHECK_EQ(42, g_echo_value_1); v8::Handle<Value> value; - // We follow Safari in ignoring assignments to host object accessors. CompileRun("Object.defineProperty(other, 'accessible_prop', {value: -1})"); value = CompileRun("other.accessible_prop == 42"); CHECK(value->IsTrue()); @@ -11345,7 +11476,7 @@ THREADED_TEST(InterceptorLoadICInvalidatedFieldViaGlobal) { static void SetOnThis(Local<String> name, Local<Value> value, const v8::PropertyCallbackInfo<void>& info) { - info.This()->ForceSet(name, value); + Local<Object>::Cast(info.This())->ForceSet(name, value); } @@ -14125,6 +14256,7 @@ static void event_handler(const v8::JitCodeEvent* event) { UNINITIALIZED_TEST(SetJitCodeEventHandler) { i::FLAG_stress_compaction = true; i::FLAG_incremental_marking = false; + if (i::FLAG_never_compact) return; const char* script = "function bar() {" " var sum = 0;" @@ -14766,74 +14898,31 @@ THREADED_TEST(TurnOnAccessCheckAndRecompile) { } -// This test verifies that pre-compilation (aka preparsing) can be called -// without initializing the whole VM. Thus we cannot run this test in a -// multi-threaded setup. -TEST(PreCompile) { - // TODO(155): This test would break without the initialization of V8. This is - // a workaround for now to make this test not fail. 
- v8::V8::Initialize(); - v8::Isolate* isolate = CcTest::isolate(); - HandleScope handle_scope(isolate); - const char* script = "function foo(a) { return a+1; }"; - v8::ScriptData* sd = v8::ScriptData::PreCompile(v8::String::NewFromUtf8( - isolate, script, v8::String::kNormalString, i::StrLength(script))); - CHECK_NE(sd->Length(), 0); - CHECK_NE(sd->Data(), NULL); - CHECK(!sd->HasError()); - delete sd; -} - - -TEST(PreCompileWithError) { - v8::V8::Initialize(); - v8::Isolate* isolate = CcTest::isolate(); - HandleScope handle_scope(isolate); - const char* script = "function foo(a) { return 1 * * 2; }"; - v8::ScriptData* sd = v8::ScriptData::PreCompile(v8::String::NewFromUtf8( - isolate, script, v8::String::kNormalString, i::StrLength(script))); - CHECK(sd->HasError()); - delete sd; -} - - -TEST(Regress31661) { - v8::V8::Initialize(); - v8::Isolate* isolate = CcTest::isolate(); - HandleScope handle_scope(isolate); - const char* script = " The Definintive Guide"; - v8::ScriptData* sd = v8::ScriptData::PreCompile(v8::String::NewFromUtf8( - isolate, script, v8::String::kNormalString, i::StrLength(script))); - CHECK(sd->HasError()); - delete sd; -} - - // Tests that ScriptData can be serialized and deserialized. TEST(PreCompileSerialization) { v8::V8::Initialize(); - v8::Isolate* isolate = CcTest::isolate(); + LocalContext env; + v8::Isolate* isolate = env->GetIsolate(); HandleScope handle_scope(isolate); - const char* script = "function foo(a) { return a+1; }"; - v8::ScriptData* sd = v8::ScriptData::PreCompile(v8::String::NewFromUtf8( - isolate, script, v8::String::kNormalString, i::StrLength(script))); + i::FLAG_min_preparse_length = 0; + const char* script = "function foo(a) { return a+1; }"; + v8::ScriptCompiler::Source source(v8_str(script)); + v8::ScriptCompiler::Compile(isolate, &source, + v8::ScriptCompiler::kProduceDataToCache); // Serialize. - int serialized_data_length = sd->Length(); - char* serialized_data = i::NewArray<char>(serialized_data_length); - i::OS::MemCopy(serialized_data, sd->Data(), serialized_data_length); + const v8::ScriptCompiler::CachedData* cd = source.GetCachedData(); + char* serialized_data = i::NewArray<char>(cd->length); + i::OS::MemCopy(serialized_data, cd->data, cd->length); // Deserialize. - v8::ScriptData* deserialized_sd = - v8::ScriptData::New(serialized_data, serialized_data_length); + i::ScriptData* deserialized = i::ScriptData::New(serialized_data, cd->length); // Verify that the original is the same as the deserialized. - CHECK_EQ(sd->Length(), deserialized_sd->Length()); - CHECK_EQ(0, memcmp(sd->Data(), deserialized_sd->Data(), sd->Length())); - CHECK_EQ(sd->HasError(), deserialized_sd->HasError()); + CHECK_EQ(cd->length, deserialized->Length()); + CHECK_EQ(0, memcmp(cd->data, deserialized->Data(), cd->length)); - delete sd; - delete deserialized_sd; + delete deserialized; i::DeleteArray(serialized_data); } @@ -14843,27 +14932,32 @@ TEST(PreCompileDeserializationError) { v8::V8::Initialize(); const char* data = "DONT CARE"; int invalid_size = 3; - v8::ScriptData* sd = v8::ScriptData::New(data, invalid_size); - - CHECK_EQ(0, sd->Length()); - - delete sd; + i::ScriptData* sd = i::ScriptData::New(data, invalid_size); + CHECK_EQ(NULL, sd); } -// Attempts to deserialize bad data. 
-TEST(PreCompileInvalidPreparseDataError) { +TEST(CompileWithInvalidCachedData) { v8::V8::Initialize(); v8::Isolate* isolate = CcTest::isolate(); LocalContext context; v8::HandleScope scope(context->GetIsolate()); + i::FLAG_min_preparse_length = 0; const char* script = "function foo(){ return 5;}\n" "function bar(){ return 6 + 7;} foo();"; - v8::ScriptData* sd = v8::ScriptData::PreCompile(v8::String::NewFromUtf8( - isolate, script, v8::String::kNormalString, i::StrLength(script))); + v8::ScriptCompiler::Source source(v8_str(script)); + v8::ScriptCompiler::Compile(isolate, &source, + v8::ScriptCompiler::kProduceDataToCache); + // source owns its cached data. Create a ScriptData based on it. The user + // never needs to create ScriptDatas any more; we only need it here because we + // want to modify the data before passing it back. + const v8::ScriptCompiler::CachedData* cd = source.GetCachedData(); + // ScriptData does not take ownership of the buffers passed to it. + i::ScriptData* sd = + i::ScriptData::New(reinterpret_cast<const char*>(cd->data), cd->length); CHECK(!sd->HasError()); - // ScriptDataImpl private implementation details + // ScriptData private implementation details const int kHeaderSize = i::PreparseDataConstants::kHeaderSize; const int kFunctionEntrySize = i::FunctionEntry::kSize; const int kFunctionEntryStartOffset = 0; @@ -14875,37 +14969,73 @@ TEST(PreCompileInvalidPreparseDataError) { sd_data[kHeaderSize + 1 * kFunctionEntrySize + kFunctionEntryEndOffset] = 0; v8::TryCatch try_catch; - v8::ScriptCompiler::Source script_source( - String::NewFromUtf8(isolate, script), + // Make the script slightly different so that we don't hit the compilation + // cache. Don't change the lenghts of tokens. + const char* script2 = "function foo(){ return 6;}\n" + "function bar(){ return 6 + 7;} foo();"; + v8::ScriptCompiler::Source source2( + v8_str(script2), + // CachedData doesn't take ownership of the buffers, Source takes + // ownership of CachedData. new v8::ScriptCompiler::CachedData( reinterpret_cast<const uint8_t*>(sd->Data()), sd->Length())); Local<v8::UnboundScript> compiled_script = - v8::ScriptCompiler::CompileUnbound(isolate, &script_source); + v8::ScriptCompiler::CompileUnbound(isolate, &source2); CHECK(try_catch.HasCaught()); - String::Utf8Value exception_value(try_catch.Message()->Get()); - CHECK_EQ("Uncaught SyntaxError: Invalid preparser data for function bar", - *exception_value); + { + String::Utf8Value exception_value(try_catch.Message()->Get()); + CHECK_EQ("Uncaught SyntaxError: Invalid cached data for function bar", + *exception_value); + } try_catch.Reset(); delete sd; - // Overwrite function bar's start position with 200. The function entry - // will not be found when searching for it by position and we should fall - // back on eager compilation. - sd = v8::ScriptData::PreCompile(v8::String::NewFromUtf8( - isolate, script, v8::String::kNormalString, i::StrLength(script))); + // Overwrite function bar's start position with 200. The function entry will + // not be found when searching for it by position, and the compilation fails. + + // ScriptData does not take ownership of the buffers passed to it. 
+ sd = i::ScriptData::New(reinterpret_cast<const char*>(cd->data), cd->length); sd_data = reinterpret_cast<unsigned*>(const_cast<char*>(sd->Data())); sd_data[kHeaderSize + 1 * kFunctionEntrySize + kFunctionEntryStartOffset] = 200; - v8::ScriptCompiler::Source script_source2( - String::NewFromUtf8(isolate, script), + const char* script3 = "function foo(){ return 7;}\n" + "function bar(){ return 6 + 7;} foo();"; + v8::ScriptCompiler::Source source3( + v8_str(script3), new v8::ScriptCompiler::CachedData( reinterpret_cast<const uint8_t*>(sd->Data()), sd->Length())); compiled_script = - v8::ScriptCompiler::CompileUnbound(isolate, &script_source2); - CHECK(!try_catch.HasCaught()); + v8::ScriptCompiler::CompileUnbound(isolate, &source3); + CHECK(try_catch.HasCaught()); + { + String::Utf8Value exception_value(try_catch.Message()->Get()); + CHECK_EQ("Uncaught SyntaxError: Invalid cached data for function bar", + *exception_value); + } + CHECK(compiled_script.IsEmpty()); + try_catch.Reset(); + delete sd; + // Try passing in cached data which is obviously invalid (wrong length). + sd = i::ScriptData::New(reinterpret_cast<const char*>(cd->data), cd->length); + const char* script4 = + "function foo(){ return 8;}\n" + "function bar(){ return 6 + 7;} foo();"; + v8::ScriptCompiler::Source source4( + v8_str(script4), + new v8::ScriptCompiler::CachedData( + reinterpret_cast<const uint8_t*>(sd->Data()), sd->Length() - 1)); + compiled_script = + v8::ScriptCompiler::CompileUnbound(isolate, &source4); + CHECK(try_catch.HasCaught()); + { + String::Utf8Value exception_value(try_catch.Message()->Get()); + CHECK_EQ("Uncaught SyntaxError: Invalid cached data", + *exception_value); + } + CHECK(compiled_script.IsEmpty()); delete sd; } @@ -15065,9 +15195,11 @@ THREADED_TEST(MorphCompositeStringTest) { i::StrLength(c_string))); Local<String> lhs(v8::Utils::ToLocal( - factory->NewExternalStringFromAscii(&ascii_resource))); + factory->NewExternalStringFromAscii(&ascii_resource) + .ToHandleChecked())); Local<String> rhs(v8::Utils::ToLocal( - factory->NewExternalStringFromAscii(&ascii_resource))); + factory->NewExternalStringFromAscii(&ascii_resource) + .ToHandleChecked())); env->Global()->Set(v8_str("lhs"), lhs); env->Global()->Set(v8_str("rhs"), rhs); @@ -15609,7 +15741,8 @@ static void CheckElementValue(i::Isolate* isolate, int expected, i::Handle<i::Object> obj, int offset) { - i::Object* element = *i::Object::GetElement(isolate, obj, offset); + i::Object* element = + *i::Object::GetElement(isolate, obj, offset).ToHandleChecked(); CHECK_EQ(expected, i::Smi::cast(element)->value()); } @@ -15696,17 +15829,20 @@ THREADED_TEST(PixelArray) { i::Handle<i::Smi> value(i::Smi::FromInt(2), reinterpret_cast<i::Isolate*>(context->GetIsolate())); i::Handle<i::Object> no_failure; - no_failure = i::JSObject::SetElement(jsobj, 1, value, NONE, i::SLOPPY); + no_failure = i::JSObject::SetElement( + jsobj, 1, value, NONE, i::SLOPPY).ToHandleChecked(); ASSERT(!no_failure.is_null()); i::USE(no_failure); CheckElementValue(isolate, 2, jsobj, 1); *value.location() = i::Smi::FromInt(256); - no_failure = i::JSObject::SetElement(jsobj, 1, value, NONE, i::SLOPPY); + no_failure = i::JSObject::SetElement( + jsobj, 1, value, NONE, i::SLOPPY).ToHandleChecked(); ASSERT(!no_failure.is_null()); i::USE(no_failure); CheckElementValue(isolate, 255, jsobj, 1); *value.location() = i::Smi::FromInt(-1); - no_failure = i::JSObject::SetElement(jsobj, 1, value, NONE, i::SLOPPY); + no_failure = i::JSObject::SetElement( + jsobj, 1, value, NONE, 
i::SLOPPY).ToHandleChecked(); ASSERT(!no_failure.is_null()); i::USE(no_failure); CheckElementValue(isolate, 0, jsobj, 1); @@ -16243,7 +16379,8 @@ static void ObjectWithExternalArrayTestHelper( array_type == v8::kExternalFloat32Array) { CHECK_EQ(static_cast<int>(i::OS::nan_value()), static_cast<int>( - i::Object::GetElement(isolate, jsobj, 7)->Number())); + i::Object::GetElement( + isolate, jsobj, 7).ToHandleChecked()->Number())); } else { CheckElementValue(isolate, 0, jsobj, 7); } @@ -16255,7 +16392,8 @@ static void ObjectWithExternalArrayTestHelper( CHECK_EQ(2, result->Int32Value()); CHECK_EQ(2, static_cast<int>( - i::Object::GetElement(isolate, jsobj, 6)->Number())); + i::Object::GetElement( + isolate, jsobj, 6).ToHandleChecked()->Number())); if (array_type != v8::kExternalFloat32Array && array_type != v8::kExternalFloat64Array) { @@ -16535,7 +16673,8 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type, kElementCount); CHECK_EQ(1, static_cast<int>( - i::Object::GetElement(isolate, jsobj, 1)->Number())); + i::Object::GetElement( + isolate, jsobj, 1).ToHandleChecked()->Number())); ObjectWithExternalArrayTestHelper<ExternalArrayClass, ElementType>( context.local(), obj, kElementCount, array_type, low, high); @@ -17103,12 +17242,7 @@ void AnalyzeStackInNativeCode(const v8::FunctionCallbackInfo<v8::Value>& args) { stackTrace->GetFrame(0)); checkStackFrame(origin, "baz", 8, 3, false, true, stackTrace->GetFrame(1)); -#ifdef ENABLE_DEBUGGER_SUPPORT bool is_eval = true; -#else // ENABLE_DEBUGGER_SUPPORT - bool is_eval = false; -#endif // ENABLE_DEBUGGER_SUPPORT - // This is the source string inside the eval which has the call to baz. checkStackFrame(NULL, "", 1, 5, is_eval, false, stackTrace->GetFrame(2)); @@ -17872,7 +18006,8 @@ TEST(VisitExternalStrings) { CcTest::heap()->CollectAllAvailableGarbage(); // Tenure string. // Turn into a symbol. 
i::Handle<i::String> string3_i = v8::Utils::OpenHandle(*string3); - CHECK(!CcTest::heap()->InternalizeString(*string3_i)->IsFailure()); + CHECK(!CcTest::i_isolate()->factory()->InternalizeString( + string3_i).is_null()); CHECK(string3_i->IsInternalizedString()); // We need to add usages for string* to avoid warnings in GCC 4.7 @@ -18423,7 +18558,7 @@ static void SetterWhichSetsYOnThisTo23( const v8::PropertyCallbackInfo<void>& info) { CHECK(v8::Utils::OpenHandle(*info.This())->IsJSObject()); CHECK(v8::Utils::OpenHandle(*info.Holder())->IsJSObject()); - info.This()->Set(v8_str("y"), v8_num(23)); + Local<Object>::Cast(info.This())->Set(v8_str("y"), v8_num(23)); } @@ -18442,7 +18577,7 @@ void FooSetInterceptor(Local<String> name, CHECK(v8::Utils::OpenHandle(*info.This())->IsJSObject()); CHECK(v8::Utils::OpenHandle(*info.Holder())->IsJSObject()); if (!name->Equals(v8_str("foo"))) return; - info.This()->Set(v8_str("y"), v8_num(23)); + Local<Object>::Cast(info.This())->Set(v8_str("y"), v8_num(23)); info.GetReturnValue().Set(v8_num(23)); } @@ -18495,7 +18630,7 @@ static void NamedPropertySetterWhichSetsYOnThisTo23( Local<Value> value, const v8::PropertyCallbackInfo<v8::Value>& info) { if (name->Equals(v8_str("x"))) { - info.This()->Set(v8_str("y"), v8_num(23)); + Local<Object>::Cast(info.This())->Set(v8_str("y"), v8_num(23)); } } @@ -18900,8 +19035,7 @@ THREADED_TEST(TwoByteStringInAsciiCons) { int length = string->length(); CHECK(string->IsOneByteRepresentation()); - FlattenString(string); - i::Handle<i::String> flat_string = FlattenGetString(string); + i::Handle<i::String> flat_string = i::String::Flatten(string); CHECK(string->IsOneByteRepresentation()); CHECK(flat_string->IsOneByteRepresentation()); @@ -19402,7 +19536,7 @@ class InitDefaultIsolateThread : public v8::internal::Thread { case SetResourceConstraints: { static const int K = 1024; v8::ResourceConstraints constraints; - constraints.set_max_young_space_size(256 * K); + constraints.set_max_new_space_size(2 * K * K); constraints.set_max_old_space_size(4 * K * K); v8::SetResourceConstraints(CcTest::isolate(), &constraints); break; @@ -20527,9 +20661,9 @@ TEST(CallCompletedCallback) { env->Global()->Set(v8_str("recursion"), recursive_runtime->GetFunction()); // Adding the same callback a second time has no effect. 
- v8::V8::AddCallCompletedCallback(CallCompletedCallback1); - v8::V8::AddCallCompletedCallback(CallCompletedCallback1); - v8::V8::AddCallCompletedCallback(CallCompletedCallback2); + env->GetIsolate()->AddCallCompletedCallback(CallCompletedCallback1); + env->GetIsolate()->AddCallCompletedCallback(CallCompletedCallback1); + env->GetIsolate()->AddCallCompletedCallback(CallCompletedCallback2); i::OS::Print("--- Script (1) ---\n"); Local<Script> script = v8::Script::Compile( v8::String::NewFromUtf8(env->GetIsolate(), "recursion(0)")); @@ -20538,7 +20672,7 @@ TEST(CallCompletedCallback) { i::OS::Print("\n--- Script (2) ---\n"); callback_fired = 0; - v8::V8::RemoveCallCompletedCallback(CallCompletedCallback1); + env->GetIsolate()->RemoveCallCompletedCallback(CallCompletedCallback1); script->Run(); CHECK_EQ(2, callback_fired); @@ -20567,7 +20701,7 @@ void CallCompletedCallbackException() { TEST(CallCompletedCallbackOneException) { LocalContext env; v8::HandleScope scope(env->GetIsolate()); - v8::V8::AddCallCompletedCallback(CallCompletedCallbackNoException); + env->GetIsolate()->AddCallCompletedCallback(CallCompletedCallbackNoException); CompileRun("throw 'exception';"); } @@ -20575,7 +20709,7 @@ TEST(CallCompletedCallbackOneException) { TEST(CallCompletedCallbackTwoExceptions) { LocalContext env; v8::HandleScope scope(env->GetIsolate()); - v8::V8::AddCallCompletedCallback(CallCompletedCallbackException); + env->GetIsolate()->AddCallCompletedCallback(CallCompletedCallbackException); CompileRun("throw 'first exception';"); } @@ -20602,22 +20736,22 @@ TEST(EnqueueMicrotask) { CHECK_EQ(0, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(0, CompileRun("ext2Calls")->Int32Value()); - v8::V8::EnqueueMicrotask(env->GetIsolate(), - Function::New(env->GetIsolate(), MicrotaskOne)); + env->GetIsolate()->EnqueueMicrotask( + Function::New(env->GetIsolate(), MicrotaskOne)); CompileRun("1+1;"); CHECK_EQ(1, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(0, CompileRun("ext2Calls")->Int32Value()); - v8::V8::EnqueueMicrotask(env->GetIsolate(), - Function::New(env->GetIsolate(), MicrotaskOne)); - v8::V8::EnqueueMicrotask(env->GetIsolate(), - Function::New(env->GetIsolate(), MicrotaskTwo)); + env->GetIsolate()->EnqueueMicrotask( + Function::New(env->GetIsolate(), MicrotaskOne)); + env->GetIsolate()->EnqueueMicrotask( + Function::New(env->GetIsolate(), MicrotaskTwo)); CompileRun("1+1;"); CHECK_EQ(2, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(1, CompileRun("ext2Calls")->Int32Value()); - v8::V8::EnqueueMicrotask(env->GetIsolate(), - Function::New(env->GetIsolate(), MicrotaskTwo)); + env->GetIsolate()->EnqueueMicrotask( + Function::New(env->GetIsolate(), MicrotaskTwo)); CompileRun("1+1;"); CHECK_EQ(2, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(2, CompileRun("ext2Calls")->Int32Value()); @@ -20638,41 +20772,54 @@ TEST(SetAutorunMicrotasks) { CHECK_EQ(0, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(0, CompileRun("ext2Calls")->Int32Value()); - v8::V8::EnqueueMicrotask(env->GetIsolate(), - Function::New(env->GetIsolate(), MicrotaskOne)); + env->GetIsolate()->EnqueueMicrotask( + Function::New(env->GetIsolate(), MicrotaskOne)); CompileRun("1+1;"); CHECK_EQ(1, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(0, CompileRun("ext2Calls")->Int32Value()); - V8::SetAutorunMicrotasks(env->GetIsolate(), false); - v8::V8::EnqueueMicrotask(env->GetIsolate(), - Function::New(env->GetIsolate(), MicrotaskOne)); - v8::V8::EnqueueMicrotask(env->GetIsolate(), - Function::New(env->GetIsolate(), MicrotaskTwo)); + 
env->GetIsolate()->SetAutorunMicrotasks(false); + env->GetIsolate()->EnqueueMicrotask( + Function::New(env->GetIsolate(), MicrotaskOne)); + env->GetIsolate()->EnqueueMicrotask( + Function::New(env->GetIsolate(), MicrotaskTwo)); CompileRun("1+1;"); CHECK_EQ(1, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(0, CompileRun("ext2Calls")->Int32Value()); - V8::RunMicrotasks(env->GetIsolate()); + env->GetIsolate()->RunMicrotasks(); CHECK_EQ(2, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(1, CompileRun("ext2Calls")->Int32Value()); - v8::V8::EnqueueMicrotask(env->GetIsolate(), - Function::New(env->GetIsolate(), MicrotaskTwo)); + env->GetIsolate()->EnqueueMicrotask( + Function::New(env->GetIsolate(), MicrotaskTwo)); CompileRun("1+1;"); CHECK_EQ(2, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(1, CompileRun("ext2Calls")->Int32Value()); - V8::RunMicrotasks(env->GetIsolate()); + env->GetIsolate()->RunMicrotasks(); CHECK_EQ(2, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(2, CompileRun("ext2Calls")->Int32Value()); - V8::SetAutorunMicrotasks(env->GetIsolate(), true); - v8::V8::EnqueueMicrotask(env->GetIsolate(), - Function::New(env->GetIsolate(), MicrotaskTwo)); + env->GetIsolate()->SetAutorunMicrotasks(true); + env->GetIsolate()->EnqueueMicrotask( + Function::New(env->GetIsolate(), MicrotaskTwo)); CompileRun("1+1;"); CHECK_EQ(2, CompileRun("ext1Calls")->Int32Value()); CHECK_EQ(3, CompileRun("ext2Calls")->Int32Value()); + + env->GetIsolate()->EnqueueMicrotask( + Function::New(env->GetIsolate(), MicrotaskTwo)); + { + v8::Isolate::SuppressMicrotaskExecutionScope scope(env->GetIsolate()); + CompileRun("1+1;"); + CHECK_EQ(2, CompileRun("ext1Calls")->Int32Value()); + CHECK_EQ(3, CompileRun("ext2Calls")->Int32Value()); + } + + CompileRun("1+1;"); + CHECK_EQ(2, CompileRun("ext1Calls")->Int32Value()); + CHECK_EQ(4, CompileRun("ext2Calls")->Int32Value()); } @@ -21922,7 +22069,7 @@ THREADED_TEST(FunctionNew) { i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Handle<i::JSObject> cache(i_isolate->native_context()->function_cache()); i::Handle<i::Object> elm = - i::Object::GetElementNoExceptionThrown(i_isolate, cache, serial_number); + i::Object::GetElement(i_isolate, cache, serial_number).ToHandleChecked(); CHECK(elm->IsUndefined()); // Verify that each Function::New creates a new function instance Local<Object> data2 = v8::Object::New(isolate); @@ -22225,20 +22372,20 @@ TEST(Promises) { p->Chain(f1); CHECK_EQ(0, global->Get(v8_str("x1"))->Int32Value()); - V8::RunMicrotasks(isolate); + isolate->RunMicrotasks(); CHECK_EQ(1, global->Get(v8_str("x1"))->Int32Value()); p->Catch(f2); - V8::RunMicrotasks(isolate); + isolate->RunMicrotasks(); CHECK_EQ(0, global->Get(v8_str("x2"))->Int32Value()); r->Catch(f2); CHECK_EQ(0, global->Get(v8_str("x2"))->Int32Value()); - V8::RunMicrotasks(isolate); + isolate->RunMicrotasks(); CHECK_EQ(2, global->Get(v8_str("x2"))->Int32Value()); r->Chain(f1); - V8::RunMicrotasks(isolate); + isolate->RunMicrotasks(); CHECK_EQ(1, global->Get(v8_str("x1"))->Int32Value()); // Chaining pending promises. 
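The test-api.cc hunks above move EnqueueMicrotask, RunMicrotasks and SetAutorunMicrotasks from static v8::V8 entry points to methods on v8::Isolate, and add v8::Isolate::SuppressMicrotaskExecutionScope. A condensed sketch of the explicit-pumping pattern those tests exercise; it assumes an initialized isolate with a context already entered, and NoopMicrotask/RunSomeScript are placeholders for the MicrotaskOne/Two callbacks and the CompileRun helper used in the tests:

#include "v8.h"

// Placeholder microtask callback (the tests above use MicrotaskOne/Two).
static void NoopMicrotask(const v8::FunctionCallbackInfo<v8::Value>& info) {}

// Drive microtasks by hand with the Isolate-based API shown in the hunks.
static void PumpMicrotasksExplicitly(v8::Isolate* isolate,
                                     void (*RunSomeScript)(v8::Isolate*)) {
  v8::HandleScope handle_scope(isolate);

  // Turn off the default behaviour of running microtasks after each call.
  isolate->SetAutorunMicrotasks(false);

  // Queue a task; with autorun off, nothing fires during script execution.
  isolate->EnqueueMicrotask(v8::Function::New(isolate, NoopMicrotask));
  RunSomeScript(isolate);  // the queued microtask has not run yet

  // Drain the queue at a point the embedder chooses.
  isolate->RunMicrotasks();

  // With autorun back on, a scope can still hold tasks back temporarily,
  // as checked by the SuppressMicrotaskExecutionScope block above.
  isolate->SetAutorunMicrotasks(true);
  isolate->EnqueueMicrotask(v8::Function::New(isolate, NoopMicrotask));
  {
    v8::Isolate::SuppressMicrotaskExecutionScope suppress(isolate);
    RunSomeScript(isolate);  // microtask stays queued inside this scope
  }
  RunSomeScript(isolate);  // the queued microtask runs after the scope ends
}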
@@ -22248,7 +22395,7 @@ TEST(Promises) { pr->GetPromise()->Chain(f1); rr->GetPromise()->Catch(f2); - V8::RunMicrotasks(isolate); + isolate->RunMicrotasks(); CHECK_EQ(0, global->Get(v8_str("x1"))->Int32Value()); CHECK_EQ(0, global->Get(v8_str("x2"))->Int32Value()); @@ -22257,7 +22404,7 @@ TEST(Promises) { CHECK_EQ(0, global->Get(v8_str("x1"))->Int32Value()); CHECK_EQ(0, global->Get(v8_str("x2"))->Int32Value()); - V8::RunMicrotasks(isolate); + isolate->RunMicrotasks(); CHECK_EQ(1, global->Get(v8_str("x1"))->Int32Value()); CHECK_EQ(2, global->Get(v8_str("x2"))->Int32Value()); @@ -22268,7 +22415,7 @@ TEST(Promises) { pr->Resolve(v8::Integer::New(isolate, 3)); CHECK_EQ(0, global->Get(v8_str("x1"))->Int32Value()); CHECK_EQ(0, global->Get(v8_str("x2"))->Int32Value()); - V8::RunMicrotasks(isolate); + isolate->RunMicrotasks(); CHECK_EQ(3, global->Get(v8_str("x1"))->Int32Value()); CHECK_EQ(4, global->Get(v8_str("x2"))->Int32Value()); @@ -22278,7 +22425,7 @@ TEST(Promises) { rr->Reject(v8::Integer::New(isolate, 3)); CHECK_EQ(0, global->Get(v8_str("x1"))->Int32Value()); CHECK_EQ(0, global->Get(v8_str("x2"))->Int32Value()); - V8::RunMicrotasks(isolate); + isolate->RunMicrotasks(); CHECK_EQ(3, global->Get(v8_str("x1"))->Int32Value()); CHECK_EQ(4, global->Get(v8_str("x2"))->Int32Value()); } @@ -22363,3 +22510,16 @@ TEST(Regress354123) { CompileRun("Object.getPrototypeOf(friend);"); CHECK_EQ(2, named_access_count); } + + +TEST(CaptureStackTraceForStackOverflow) { + v8::internal::FLAG_stack_size = 150; + LocalContext current; + v8::Isolate* isolate = current->GetIsolate(); + v8::HandleScope scope(isolate); + V8::SetCaptureStackTraceForUncaughtExceptions( + true, 10, v8::StackTrace::kDetailed); + v8::TryCatch try_catch; + CompileRun("(function f(x) { f(x+1); })(0)"); + CHECK(try_catch.HasCaught()); +} diff --git a/deps/v8/test/cctest/test-assembler-arm.cc b/deps/v8/test/cctest/test-assembler-arm.cc index 9c1c04fe3..470cd6163 100644 --- a/deps/v8/test/cctest/test-assembler-arm.cc +++ b/deps/v8/test/cctest/test-assembler-arm.cc @@ -57,15 +57,12 @@ TEST(0) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F2 f = FUNCTION_CAST<F2>(Code::cast(code)->entry()); + F2 f = FUNCTION_CAST<F2>(code->entry()); int res = reinterpret_cast<int>(CALL_GENERATED_CODE(f, 3, 4, 0, 0, 0)); ::printf("f() = %d\n", res); CHECK_EQ(7, res); @@ -95,15 +92,12 @@ TEST(1) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F1 f = FUNCTION_CAST<F1>(Code::cast(code)->entry()); + F1 f = FUNCTION_CAST<F1>(code->entry()); int res = reinterpret_cast<int>(CALL_GENERATED_CODE(f, 100, 0, 0, 0, 0)); ::printf("f() = %d\n", res); CHECK_EQ(5050, res); @@ -142,15 +136,12 @@ TEST(2) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, 
Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F1 f = FUNCTION_CAST<F1>(Code::cast(code)->entry()); + F1 f = FUNCTION_CAST<F1>(code->entry()); int res = reinterpret_cast<int>(CALL_GENERATED_CODE(f, 10, 0, 0, 0, 0)); ::printf("f() = %d\n", res); CHECK_EQ(3628800, res); @@ -191,15 +182,12 @@ TEST(3) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.i = 100000; t.c = 10; t.s = 1000; @@ -292,9 +280,9 @@ TEST(4) { __ vstr(d4, r4, OFFSET_OF(T, f)); // Convert from fixed point to floating point. - __ mov(lr, Operand(1234)); + __ mov(lr, Operand(2468)); __ vmov(s8, lr); - __ vcvt_f64_s32(d4, 1); + __ vcvt_f64_s32(d4, 2); __ vstr(d4, r4, OFFSET_OF(T, j)); // Test vabs. @@ -317,15 +305,12 @@ TEST(4) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.a = 1.5; t.b = 2.75; t.c = 17.17; @@ -380,15 +365,12 @@ TEST(5) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F1 f = FUNCTION_CAST<F1>(Code::cast(code)->entry()); + F1 f = FUNCTION_CAST<F1>(code->entry()); int res = reinterpret_cast<int>( CALL_GENERATED_CODE(f, 0xAAAAAAAA, 0, 0, 0, 0)); ::printf("f() = %d\n", res); @@ -416,15 +398,12 @@ TEST(6) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F1 f = FUNCTION_CAST<F1>(Code::cast(code)->entry()); + F1 f = FUNCTION_CAST<F1>(code->entry()); int res = reinterpret_cast<int>( CALL_GENERATED_CODE(f, 0xFFFF, 0, 0, 0, 0)); ::printf("f() = %d\n", res); @@ -492,15 +471,12 @@ static void TestRoundingMode(VCVTTypes types, CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F1 f = FUNCTION_CAST<F1>(Code::cast(code)->entry()); + F1 f = FUNCTION_CAST<F1>(code->entry()); int res = reinterpret_cast<int>( CALL_GENERATED_CODE(f, 0, 0, 0, 0, 0)); ::printf("res = %d\n", res); @@ -678,15 +654,12 @@ TEST(8) { CodeDesc desc; 
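// Every assembler test in these files is being switched from the raw-pointer
// Heap::CreateCode() / ToObjectChecked() / Code::cast() sequence to the
// handle-returning Factory::NewCode(). A minimal sketch of the new pattern,
// reusing isolate, desc and the F2 cast from the tests above (illustrative,
// not part of the patch):
Handle<Code> code = isolate->factory()->NewCode(
    desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef DEBUG
code->Print();  // the handle dereferences directly; no Code::cast() needed
#endif
F2 f = FUNCTION_CAST<F2>(code->entry());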
assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F4 fn = FUNCTION_CAST<F4>(Code::cast(code)->entry()); + F4 fn = FUNCTION_CAST<F4>(code->entry()); d.a = 1.1; d.b = 2.2; d.c = 3.3; @@ -790,15 +763,12 @@ TEST(9) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F4 fn = FUNCTION_CAST<F4>(Code::cast(code)->entry()); + F4 fn = FUNCTION_CAST<F4>(code->entry()); d.a = 1.1; d.b = 2.2; d.c = 3.3; @@ -898,15 +868,12 @@ TEST(10) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F4 fn = FUNCTION_CAST<F4>(Code::cast(code)->entry()); + F4 fn = FUNCTION_CAST<F4>(code->entry()); d.a = 1.1; d.b = 2.2; d.c = 3.3; @@ -995,15 +962,12 @@ TEST(11) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + F3 f = FUNCTION_CAST<F3>(code->entry()); Object* dummy = CALL_GENERATED_CODE(f, &i, 0, 0, 0, 0); USE(dummy); @@ -1125,15 +1089,12 @@ TEST(13) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.a = 1.5; t.b = 2.75; t.c = 17.17; @@ -1200,15 +1161,12 @@ TEST(14) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.left = BitCast<double>(kHoleNanInt64); t.right = 1; t.add_result = 0; @@ -1306,15 +1264,12 @@ TEST(15) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - 
Code::cast(code)->Print(); + code->Print(); #endif - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.src0 = 0x01020304; t.src1 = 0x11121314; t.src2 = 0x21222324; @@ -1411,15 +1366,12 @@ TEST(16) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.src0 = 0x01020304; t.src1 = 0x11121314; t.src2 = 0x11121300; @@ -1496,15 +1448,12 @@ TEST(18) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG - Code::cast(code)->Print(); + code->Print(); #endif - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + F3 f = FUNCTION_CAST<F3>(code->entry()); Object* dummy; TEST_SDIV(1073741824, kMinInt, -2); TEST_SDIV(kMinInt, kMinInt, -1); @@ -1586,9 +1535,8 @@ TEST(code_relative_offset) { CodeDesc desc; assm.GetCode(&desc); - Handle<Code> code = isolate->factory()->NewCode(desc, - Code::ComputeFlags(Code::STUB), code_object); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), code_object); F1 f = FUNCTION_CAST<F1>(code->entry()); int res = reinterpret_cast<int>(CALL_GENERATED_CODE(f, 21, 0, 0, 0, 0)); ::printf("f() = %d\n", res); diff --git a/deps/v8/test/cctest/test-assembler-arm64.cc b/deps/v8/test/cctest/test-assembler-arm64.cc index 51c202fc0..25f3adb50 100644 --- a/deps/v8/test/cctest/test-assembler-arm64.cc +++ b/deps/v8/test/cctest/test-assembler-arm64.cc @@ -174,10 +174,14 @@ static void InitializeVM() { byte* buf = new byte[buf_size]; \ MacroAssembler masm(isolate, buf, buf_size); \ RegisterDump core; \ - CPU::SetUp(); + CpuFeatures::Probe(false); #define RESET() \ - __ Reset(); + __ Reset(); \ + /* Reset the machine state (like simulator.ResetState()). */ \ + __ Msr(NZCV, xzr); \ + __ Msr(FPCR, xzr); + #define START_AFTER_RESET() \ __ SetStackPointer(csp); \ @@ -1686,6 +1690,71 @@ TEST(adr) { } +TEST(adr_far) { + INIT_V8(); + + int max_range = 1 << (Instruction::ImmPCRelRangeBitwidth - 1); + SETUP_SIZE(max_range + 1000 * kInstructionSize); + + Label done, fail; + Label test_near, near_forward, near_backward; + Label test_far, far_forward, far_backward; + + START(); + __ Mov(x0, 0x0); + + __ Bind(&test_near); + __ Adr(x10, &near_forward, MacroAssembler::kAdrFar); + __ Br(x10); + __ B(&fail); + __ Bind(&near_backward); + __ Orr(x0, x0, 1 << 1); + __ B(&test_far); + + __ Bind(&near_forward); + __ Orr(x0, x0, 1 << 0); + __ Adr(x10, &near_backward, MacroAssembler::kAdrFar); + __ Br(x10); + + __ Bind(&test_far); + __ Adr(x10, &far_forward, MacroAssembler::kAdrFar); + __ Br(x10); + __ B(&fail); + __ Bind(&far_backward); + __ Orr(x0, x0, 1 << 3); + __ B(&done); + + for (unsigned i = 0; i < max_range / kInstructionSize + 1; ++i) { + if (i % 100 == 0) { + // If we do land in this code, we do not want to execute so many nops + // before reaching the end of test (especially if tracing is activated). 
+ __ b(&fail); + } else { + __ nop(); + } + } + + + __ Bind(&far_forward); + __ Orr(x0, x0, 1 << 2); + __ Adr(x10, &far_backward, MacroAssembler::kAdrFar); + __ Br(x10); + + __ B(&done); + __ Bind(&fail); + __ Orr(x0, x0, 1 << 4); + __ Bind(&done); + + END(); + + RUN(); + + ASSERT_EQUAL_64(0xf, x0); + + TEARDOWN(); +} + + TEST(branch_cond) { INIT_V8(); SETUP(); @@ -6032,6 +6101,7 @@ TEST(frinta) { __ Fmov(s24, kFP32NegativeInfinity); __ Fmov(s25, 0.0); __ Fmov(s26, -0.0); + __ Fmov(s27, -0.2); __ Frinta(s0, s16); __ Frinta(s1, s17); @@ -6044,6 +6114,7 @@ TEST(frinta) { __ Frinta(s8, s24); __ Frinta(s9, s25); __ Frinta(s10, s26); + __ Frinta(s11, s27); __ Fmov(d16, 1.0); __ Fmov(d17, 1.1); @@ -6056,18 +6127,20 @@ TEST(frinta) { __ Fmov(d24, kFP32NegativeInfinity); __ Fmov(d25, 0.0); __ Fmov(d26, -0.0); + __ Fmov(d27, -0.2); - __ Frinta(d11, d16); - __ Frinta(d12, d17); - __ Frinta(d13, d18); - __ Frinta(d14, d19); - __ Frinta(d15, d20); - __ Frinta(d16, d21); - __ Frinta(d17, d22); - __ Frinta(d18, d23); - __ Frinta(d19, d24); - __ Frinta(d20, d25); - __ Frinta(d21, d26); + __ Frinta(d12, d16); + __ Frinta(d13, d17); + __ Frinta(d14, d18); + __ Frinta(d15, d19); + __ Frinta(d16, d20); + __ Frinta(d17, d21); + __ Frinta(d18, d22); + __ Frinta(d19, d23); + __ Frinta(d20, d24); + __ Frinta(d21, d25); + __ Frinta(d22, d26); + __ Frinta(d23, d27); END(); RUN(); @@ -6083,17 +6156,108 @@ TEST(frinta) { ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8); ASSERT_EQUAL_FP32(0.0, s9); ASSERT_EQUAL_FP32(-0.0, s10); - ASSERT_EQUAL_FP64(1.0, d11); + ASSERT_EQUAL_FP32(-0.0, s11); ASSERT_EQUAL_FP64(1.0, d12); - ASSERT_EQUAL_FP64(2.0, d13); + ASSERT_EQUAL_FP64(1.0, d13); ASSERT_EQUAL_FP64(2.0, d14); - ASSERT_EQUAL_FP64(3.0, d15); - ASSERT_EQUAL_FP64(-2.0, d16); - ASSERT_EQUAL_FP64(-3.0, d17); - ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d18); - ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d19); - ASSERT_EQUAL_FP64(0.0, d20); - ASSERT_EQUAL_FP64(-0.0, d21); + ASSERT_EQUAL_FP64(2.0, d15); + ASSERT_EQUAL_FP64(3.0, d16); + ASSERT_EQUAL_FP64(-2.0, d17); + ASSERT_EQUAL_FP64(-3.0, d18); + ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19); + ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20); + ASSERT_EQUAL_FP64(0.0, d21); + ASSERT_EQUAL_FP64(-0.0, d22); + ASSERT_EQUAL_FP64(-0.0, d23); + + TEARDOWN(); +} + + +TEST(frintm) { + INIT_V8(); + SETUP(); + + START(); + __ Fmov(s16, 1.0); + __ Fmov(s17, 1.1); + __ Fmov(s18, 1.5); + __ Fmov(s19, 1.9); + __ Fmov(s20, 2.5); + __ Fmov(s21, -1.5); + __ Fmov(s22, -2.5); + __ Fmov(s23, kFP32PositiveInfinity); + __ Fmov(s24, kFP32NegativeInfinity); + __ Fmov(s25, 0.0); + __ Fmov(s26, -0.0); + __ Fmov(s27, -0.2); + + __ Frintm(s0, s16); + __ Frintm(s1, s17); + __ Frintm(s2, s18); + __ Frintm(s3, s19); + __ Frintm(s4, s20); + __ Frintm(s5, s21); + __ Frintm(s6, s22); + __ Frintm(s7, s23); + __ Frintm(s8, s24); + __ Frintm(s9, s25); + __ Frintm(s10, s26); + __ Frintm(s11, s27); + + __ Fmov(d16, 1.0); + __ Fmov(d17, 1.1); + __ Fmov(d18, 1.5); + __ Fmov(d19, 1.9); + __ Fmov(d20, 2.5); + __ Fmov(d21, -1.5); + __ Fmov(d22, -2.5); + __ Fmov(d23, kFP32PositiveInfinity); + __ Fmov(d24, kFP32NegativeInfinity); + __ Fmov(d25, 0.0); + __ Fmov(d26, -0.0); + __ Fmov(d27, -0.2); + + __ Frintm(d12, d16); + __ Frintm(d13, d17); + __ Frintm(d14, d18); + __ Frintm(d15, d19); + __ Frintm(d16, d20); + __ Frintm(d17, d21); + __ Frintm(d18, d22); + __ Frintm(d19, d23); + __ Frintm(d20, d24); + __ Frintm(d21, d25); + __ Frintm(d22, d26); + __ Frintm(d23, d27); + END(); + + RUN(); + + ASSERT_EQUAL_FP32(1.0, s0); + 
ASSERT_EQUAL_FP32(1.0, s1); + ASSERT_EQUAL_FP32(1.0, s2); + ASSERT_EQUAL_FP32(1.0, s3); + ASSERT_EQUAL_FP32(2.0, s4); + ASSERT_EQUAL_FP32(-2.0, s5); + ASSERT_EQUAL_FP32(-3.0, s6); + ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7); + ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8); + ASSERT_EQUAL_FP32(0.0, s9); + ASSERT_EQUAL_FP32(-0.0, s10); + ASSERT_EQUAL_FP32(-1.0, s11); + ASSERT_EQUAL_FP64(1.0, d12); + ASSERT_EQUAL_FP64(1.0, d13); + ASSERT_EQUAL_FP64(1.0, d14); + ASSERT_EQUAL_FP64(1.0, d15); + ASSERT_EQUAL_FP64(2.0, d16); + ASSERT_EQUAL_FP64(-2.0, d17); + ASSERT_EQUAL_FP64(-3.0, d18); + ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19); + ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20); + ASSERT_EQUAL_FP64(0.0, d21); + ASSERT_EQUAL_FP64(-0.0, d22); + ASSERT_EQUAL_FP64(-1.0, d23); TEARDOWN(); } @@ -6115,6 +6279,7 @@ TEST(frintn) { __ Fmov(s24, kFP32NegativeInfinity); __ Fmov(s25, 0.0); __ Fmov(s26, -0.0); + __ Fmov(s27, -0.2); __ Frintn(s0, s16); __ Frintn(s1, s17); @@ -6127,6 +6292,7 @@ TEST(frintn) { __ Frintn(s8, s24); __ Frintn(s9, s25); __ Frintn(s10, s26); + __ Frintn(s11, s27); __ Fmov(d16, 1.0); __ Fmov(d17, 1.1); @@ -6139,18 +6305,20 @@ TEST(frintn) { __ Fmov(d24, kFP32NegativeInfinity); __ Fmov(d25, 0.0); __ Fmov(d26, -0.0); + __ Fmov(d27, -0.2); - __ Frintn(d11, d16); - __ Frintn(d12, d17); - __ Frintn(d13, d18); - __ Frintn(d14, d19); - __ Frintn(d15, d20); - __ Frintn(d16, d21); - __ Frintn(d17, d22); - __ Frintn(d18, d23); - __ Frintn(d19, d24); - __ Frintn(d20, d25); - __ Frintn(d21, d26); + __ Frintn(d12, d16); + __ Frintn(d13, d17); + __ Frintn(d14, d18); + __ Frintn(d15, d19); + __ Frintn(d16, d20); + __ Frintn(d17, d21); + __ Frintn(d18, d22); + __ Frintn(d19, d23); + __ Frintn(d20, d24); + __ Frintn(d21, d25); + __ Frintn(d22, d26); + __ Frintn(d23, d27); END(); RUN(); @@ -6166,17 +6334,19 @@ TEST(frintn) { ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8); ASSERT_EQUAL_FP32(0.0, s9); ASSERT_EQUAL_FP32(-0.0, s10); - ASSERT_EQUAL_FP64(1.0, d11); + ASSERT_EQUAL_FP32(-0.0, s11); ASSERT_EQUAL_FP64(1.0, d12); - ASSERT_EQUAL_FP64(2.0, d13); + ASSERT_EQUAL_FP64(1.0, d13); ASSERT_EQUAL_FP64(2.0, d14); ASSERT_EQUAL_FP64(2.0, d15); - ASSERT_EQUAL_FP64(-2.0, d16); + ASSERT_EQUAL_FP64(2.0, d16); ASSERT_EQUAL_FP64(-2.0, d17); - ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d18); - ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d19); - ASSERT_EQUAL_FP64(0.0, d20); - ASSERT_EQUAL_FP64(-0.0, d21); + ASSERT_EQUAL_FP64(-2.0, d18); + ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19); + ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20); + ASSERT_EQUAL_FP64(0.0, d21); + ASSERT_EQUAL_FP64(-0.0, d22); + ASSERT_EQUAL_FP64(-0.0, d23); TEARDOWN(); } @@ -10771,19 +10941,15 @@ TEST(pool_size) { __ bind(&exit); - Heap* heap = isolate->heap(); + HandleScope handle_scope(isolate); CodeDesc desc; - Object* code_object = NULL; - Code* code; masm.GetCode(&desc); - MaybeObject* maybe_code = heap->CreateCode(desc, 0, masm.CodeObject()); - maybe_code->ToObject(&code_object); - code = Code::cast(code_object); + Handle<Code> code = isolate->factory()->NewCode(desc, 0, masm.CodeObject()); unsigned pool_count = 0; int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) | RelocInfo::ModeMask(RelocInfo::VENEER_POOL); - for (RelocIterator it(code, pool_mask); !it.done(); it.next()) { + for (RelocIterator it(*code, pool_mask); !it.done(); it.next()) { RelocInfo* info = it.rinfo(); if (RelocInfo::IsConstPool(info->rmode())) { ASSERT(info->data() == constant_pool_size); diff --git a/deps/v8/test/cctest/test-assembler-ia32.cc 
b/deps/v8/test/cctest/test-assembler-ia32.cc index db28231d6..ba83b3d7e 100644 --- a/deps/v8/test/cctest/test-assembler-ia32.cc +++ b/deps/v8/test/cctest/test-assembler-ia32.cc @@ -60,15 +60,12 @@ TEST(AssemblerIa320) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); + code->Print(); #endif - F2 f = FUNCTION_CAST<F2>(Code::cast(code)->entry()); + F2 f = FUNCTION_CAST<F2>(code->entry()); int res = f(3, 4); ::printf("f() = %d\n", res); CHECK_EQ(7, res); @@ -99,15 +96,12 @@ TEST(AssemblerIa321) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); + code->Print(); #endif - F1 f = FUNCTION_CAST<F1>(Code::cast(code)->entry()); + F1 f = FUNCTION_CAST<F1>(code->entry()); int res = f(100); ::printf("f() = %d\n", res); CHECK_EQ(5050, res); @@ -142,15 +136,12 @@ TEST(AssemblerIa322) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); + code->Print(); #endif - F1 f = FUNCTION_CAST<F1>(Code::cast(code)->entry()); + F1 f = FUNCTION_CAST<F1>(code->entry()); int res = f(10); ::printf("f() = %d\n", res); CHECK_EQ(3628800, res); @@ -177,10 +168,8 @@ TEST(AssemblerIa323) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); // don't print the code - our disassembler can't handle cvttss2si // instead print bytes Disassembler::Dump(stdout, @@ -212,10 +201,8 @@ TEST(AssemblerIa324) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); // don't print the code - our disassembler can't handle cvttsd2si // instead print bytes Disassembler::Dump(stdout, @@ -242,10 +229,8 @@ TEST(AssemblerIa325) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); F0 f = FUNCTION_CAST<F0>(code->entry()); int res = f(); CHECK_EQ(42, res); @@ -279,10 +264,8 @@ TEST(AssemblerIa326) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef DEBUG ::printf("\n---\n"); // 
don't print the code - our disassembler can't handle SSE instructions @@ -319,15 +302,12 @@ TEST(AssemblerIa328) { __ ret(0); CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); + code->Print(); #endif - F6 f = FUNCTION_CAST<F6>(Code::cast(code)->entry()); + F6 f = FUNCTION_CAST<F6>(code->entry()); double res = f(12); ::printf("f() = %f\n", res); @@ -375,16 +355,13 @@ TEST(AssemblerIa329) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); + code->Print(); #endif - F7 f = FUNCTION_CAST<F7>(Code::cast(code)->entry()); + F7 f = FUNCTION_CAST<F7>(code->entry()); CHECK_EQ(kLess, f(1.1, 2.2)); CHECK_EQ(kEqual, f(2.2, 2.2)); CHECK_EQ(kGreater, f(3.3, 2.2)); @@ -461,10 +438,8 @@ TEST(AssemblerMultiByteNop) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); CHECK(code->IsCode()); F0 f = FUNCTION_CAST<F0>(code->entry()); @@ -514,13 +489,10 @@ void DoSSE2(const v8::FunctionCallbackInfo<v8::Value>& args) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); - F0 f = FUNCTION_CAST<F0>(Code::cast(code)->entry()); + F0 f = FUNCTION_CAST<F0>(code->entry()); int res = f(); args.GetReturnValue().Set(v8::Integer::New(CcTest::isolate(), res)); } @@ -584,16 +556,13 @@ TEST(AssemblerIa32Extractps) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); + code->Print(); #endif - F4 f = FUNCTION_CAST<F4>(Code::cast(code)->entry()); + F4 f = FUNCTION_CAST<F4>(code->entry()); uint64_t value1 = V8_2PART_UINT64_C(0x12345678, 87654321); CHECK_EQ(0x12345678, f(uint64_to_double(value1))); uint64_t value2 = V8_2PART_UINT64_C(0x87654321, 12345678); @@ -627,16 +596,13 @@ TEST(AssemblerIa32SSE) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); + code->Print(); #endif - F8 f = FUNCTION_CAST<F8>(Code::cast(code)->entry()); + F8 f = FUNCTION_CAST<F8>(code->entry()); CHECK_EQ(2, f(1.0, 2.0)); } diff --git a/deps/v8/test/cctest/test-assembler-mips.cc 
b/deps/v8/test/cctest/test-assembler-mips.cc index 534c4cf0a..e93c1ca45 100644 --- a/deps/v8/test/cctest/test-assembler-mips.cc +++ b/deps/v8/test/cctest/test-assembler-mips.cc @@ -61,12 +61,9 @@ TEST(MIPS0) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F2 f = FUNCTION_CAST<F2>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F2 f = FUNCTION_CAST<F2>(code->entry()); int res = reinterpret_cast<int>(CALL_GENERATED_CODE(f, 0xab0, 0xc, 0, 0, 0)); ::printf("f() = %d\n", res); CHECK_EQ(0xabc, res); @@ -100,12 +97,9 @@ TEST(MIPS1) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F1 f = FUNCTION_CAST<F1>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F1 f = FUNCTION_CAST<F1>(code->entry()); int res = reinterpret_cast<int>(CALL_GENERATED_CODE(f, 50, 0, 0, 0, 0)); ::printf("f() = %d\n", res); CHECK_EQ(1275, res); @@ -241,12 +235,9 @@ TEST(MIPS2) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F2 f = FUNCTION_CAST<F2>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F2 f = FUNCTION_CAST<F2>(code->entry()); int res = reinterpret_cast<int>(CALL_GENERATED_CODE(f, 0xab0, 0xc, 0, 0, 0)); ::printf("f() = %d\n", res); CHECK_EQ(0x31415926, res); @@ -314,12 +305,9 @@ TEST(MIPS3) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.a = 1.5e14; t.b = 2.75e11; t.c = 0.0; @@ -382,12 +370,9 @@ TEST(MIPS4) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.a = 1.5e22; t.b = 2.75e11; t.c = 17.17; @@ -448,12 +433,9 @@ TEST(MIPS5) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.a = 1.5e4; t.b = 2.75e8; t.i = 12345678; @@ -521,23 +503,30 @@ TEST(MIPS6) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = 
isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.ui = 0x11223344; t.si = 0x99aabbcc; Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0); USE(dummy); CHECK_EQ(0x11223344, t.r1); +#if __BYTE_ORDER == __LITTLE_ENDIAN CHECK_EQ(0x3344, t.r2); CHECK_EQ(0xffffbbcc, t.r3); CHECK_EQ(0x0000bbcc, t.r4); CHECK_EQ(0xffffffcc, t.r5); CHECK_EQ(0x3333bbcc, t.r6); +#elif __BYTE_ORDER == __BIG_ENDIAN + CHECK_EQ(0x1122, t.r2); + CHECK_EQ(0xffff99aa, t.r3); + CHECK_EQ(0x000099aa, t.r4); + CHECK_EQ(0xffffff99, t.r5); + CHECK_EQ(0x99aa3333, t.r6); +#else +#error Unknown endianness +#endif } @@ -598,12 +587,9 @@ TEST(MIPS7) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.a = 1.5e14; t.b = 2.75e11; t.c = 2.0; @@ -697,12 +683,9 @@ TEST(MIPS8) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.input = 0x12345678; Object* dummy = CALL_GENERATED_CODE(f, &t, 0x0, 0, 0, 0); USE(dummy); @@ -745,11 +728,8 @@ TEST(MIPS9) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); } @@ -799,12 +779,9 @@ TEST(MIPS10) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.a = 2.147483646e+09; // 0x7FFFFFFE -> 0xFF80000041DFFFFF as double. t.b_word = 0x0ff00ff0; // 0x0FF00FF0 -> 0x as double. 
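// The MIPS load/store tests (MIPS6 above, MIPS11 further down) now guard
// their expected register values on the host byte order instead of assuming
// little-endian. The guard pattern, with the r2 values taken verbatim from
// TEST(MIPS6) where t.ui = 0x11223344 (illustrative, not part of the patch):
#if __BYTE_ORDER == __LITTLE_ENDIAN
  CHECK_EQ(0x3344, t.r2);   // low-order bytes sit first in memory
#elif __BYTE_ORDER == __BIG_ENDIAN
  CHECK_EQ(0x1122, t.r2);   // high-order bytes sit first in memory
#else
#error Unknown endianness
#endif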
Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0); @@ -930,18 +907,16 @@ TEST(MIPS11) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.reg_init = 0xaabbccdd; t.mem_init = 0x11223344; Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0); USE(dummy); +#if __BYTE_ORDER == __LITTLE_ENDIAN CHECK_EQ(0x44bbccdd, t.lwl_0); CHECK_EQ(0x3344ccdd, t.lwl_1); CHECK_EQ(0x223344dd, t.lwl_2); @@ -961,6 +936,29 @@ TEST(MIPS11) { CHECK_EQ(0xbbccdd44, t.swr_1); CHECK_EQ(0xccdd3344, t.swr_2); CHECK_EQ(0xdd223344, t.swr_3); +#elif __BYTE_ORDER == __BIG_ENDIAN + CHECK_EQ(0x11223344, t.lwl_0); + CHECK_EQ(0x223344dd, t.lwl_1); + CHECK_EQ(0x3344ccdd, t.lwl_2); + CHECK_EQ(0x44bbccdd, t.lwl_3); + + CHECK_EQ(0xaabbcc11, t.lwr_0); + CHECK_EQ(0xaabb1122, t.lwr_1); + CHECK_EQ(0xaa112233, t.lwr_2); + CHECK_EQ(0x11223344, t.lwr_3); + + CHECK_EQ(0xaabbccdd, t.swl_0); + CHECK_EQ(0x11aabbcc, t.swl_1); + CHECK_EQ(0x1122aabb, t.swl_2); + CHECK_EQ(0x112233aa, t.swl_3); + + CHECK_EQ(0xdd223344, t.swr_0); + CHECK_EQ(0xccdd3344, t.swr_1); + CHECK_EQ(0xbbccdd44, t.swr_2); + CHECK_EQ(0xaabbccdd, t.swr_3); +#else +#error Unknown endianness +#endif } @@ -1035,12 +1033,9 @@ TEST(MIPS12) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.x = 1; t.y = 2; t.y1 = 3; @@ -1092,12 +1087,9 @@ TEST(MIPS13) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.cvt_big_in = 0xFFFFFFFF; t.cvt_small_in = 333; @@ -1213,12 +1205,9 @@ TEST(MIPS14) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + F3 f = FUNCTION_CAST<F3>(code->entry()); t.round_up_in = 123.51; t.round_down_in = 123.49; diff --git a/deps/v8/test/cctest/test-assembler-x64.cc b/deps/v8/test/cctest/test-assembler-x64.cc index 446cec6ad..eb9fee854 100644 --- a/deps/v8/test/cctest/test-assembler-x64.cc +++ b/deps/v8/test/cctest/test-assembler-x64.cc @@ -141,6 +141,37 @@ TEST(AssemblerX64ArithmeticOperations) { } +TEST(AssemblerX64CmpbOperation) { + // Allocate an executable page of memory. 
+ size_t actual_size; + byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize, + &actual_size, + true)); + CHECK(buffer); + Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size)); + + // Assemble a function that compare argument byte returing 1 if equal else 0. + // On Windows, it compares rcx with rdx which does not require REX prefix; + // on Linux, it compares rdi with rsi which requires REX prefix. + + Label done; + __ movq(rax, Immediate(1)); + __ cmpb(arg1, arg2); + __ j(equal, &done); + __ movq(rax, Immediate(0)); + __ bind(&done); + __ ret(0); + + CodeDesc desc; + assm.GetCode(&desc); + // Call the function from C++. + int result = FUNCTION_CAST<F2>(buffer)(0x1002, 0x2002); + CHECK_EQ(1, result); + result = FUNCTION_CAST<F2>(buffer)(0x1002, 0x2003); + CHECK_EQ(0, result); +} + + TEST(AssemblerX64ImulOperation) { // Allocate an executable page of memory. size_t actual_size; @@ -544,11 +575,8 @@ TEST(AssemblerMultiByteNop) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); F0 f = FUNCTION_CAST<F0>(code->entry()); int res = f(); @@ -576,7 +604,7 @@ void DoSSE2(const v8::FunctionCallbackInfo<v8::Value>& args) { // Store input vector on the stack. for (int i = 0; i < ELEMENT_COUNT; i++) { __ movl(rax, Immediate(vec->Get(i)->Int32Value())); - __ shl(rax, Immediate(0x20)); + __ shlq(rax, Immediate(0x20)); __ orq(rax, Immediate(vec->Get(++i)->Int32Value())); __ pushq(rax); } @@ -596,11 +624,8 @@ void DoSSE2(const v8::FunctionCallbackInfo<v8::Value>& args) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); F0 f = FUNCTION_CAST<F0>(code->entry()); int res = f(); @@ -661,16 +686,13 @@ TEST(AssemblerX64Extractps) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); + code->Print(); #endif - F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry()); + F3 f = FUNCTION_CAST<F3>(code->entry()); uint64_t value1 = V8_2PART_UINT64_C(0x12345678, 87654321); CHECK_EQ(0x12345678, f(uint64_to_double(value1))); uint64_t value2 = V8_2PART_UINT64_C(0x87654321, 12345678); @@ -700,16 +722,15 @@ TEST(AssemblerX64SSE) { CodeDesc desc; assm.GetCode(&desc); - Code* code = Code::cast(isolate->heap()->CreateCode( + Handle<Code> code = isolate->factory()->NewCode( desc, Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked()); - CHECK(code->IsCode()); + Handle<Code>()); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); + code->Print(); #endif - F6 f = FUNCTION_CAST<F6>(Code::cast(code)->entry()); + F6 f = FUNCTION_CAST<F6>(code->entry()); CHECK_EQ(2, f(1.0, 2.0)); } #undef __ diff --git a/deps/v8/test/cctest/test-atomicops.cc b/deps/v8/test/cctest/test-atomicops.cc index eba956c85..53df22963 100644 --- a/deps/v8/test/cctest/test-atomicops.cc +++ 
b/deps/v8/test/cctest/test-atomicops.cc @@ -214,6 +214,21 @@ static void TestStore() { } +// Merge this test with TestStore as soon as we have Atomic8 acquire +// and release stores. +static void TestStoreAtomic8() { + const Atomic8 kVal1 = TestFillValue<Atomic8>(); + const Atomic8 kVal2 = static_cast<Atomic8>(-1); + + Atomic8 value; + + NoBarrier_Store(&value, kVal1); + CHECK_EQU(kVal1, value); + NoBarrier_Store(&value, kVal2); + CHECK_EQU(kVal2, value); +} + + // This is a simple sanity check to ensure that values are correct. // Not testing atomicity. template <class AtomicType> @@ -240,6 +255,21 @@ static void TestLoad() { } +// Merge this test with TestLoad as soon as we have Atomic8 acquire +// and release loads. +static void TestLoadAtomic8() { + const Atomic8 kVal1 = TestFillValue<Atomic8>(); + const Atomic8 kVal2 = static_cast<Atomic8>(-1); + + Atomic8 value; + + value = kVal1; + CHECK_EQU(kVal1, NoBarrier_Load(&value)); + value = kVal2; + CHECK_EQU(kVal2, NoBarrier_Load(&value)); +} + + TEST(AtomicIncrement) { TestAtomicIncrement<Atomic32>(); TestAtomicIncrement<AtomicWord>(); @@ -265,12 +295,14 @@ TEST(AtomicIncrementBounds) { TEST(Store) { + TestStoreAtomic8(); TestStore<Atomic32>(); TestStore<AtomicWord>(); } TEST(Load) { + TestLoadAtomic8(); TestLoad<Atomic32>(); TestLoad<AtomicWord>(); } diff --git a/deps/v8/test/cctest/test-code-stubs-arm.cc b/deps/v8/test/cctest/test-code-stubs-arm.cc index 53cdd1613..43233472b 100644 --- a/deps/v8/test/cctest/test-code-stubs-arm.cc +++ b/deps/v8/test/cctest/test-code-stubs-arm.cc @@ -53,9 +53,10 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate, CHECK(buffer); HandleScope handles(isolate); MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size)); - DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath); + DoubleToIStub stub(isolate, source_reg, destination_reg, 0, true, + inline_fastpath); - byte* start = stub.GetCode(isolate)->instruction_start(); + byte* start = stub.GetCode()->instruction_start(); Label done; // Save callee save registers. diff --git a/deps/v8/test/cctest/test-code-stubs-arm64.cc b/deps/v8/test/cctest/test-code-stubs-arm64.cc index 7ddefdde1..3ad07bf80 100644 --- a/deps/v8/test/cctest/test-code-stubs-arm64.cc +++ b/deps/v8/test/cctest/test-code-stubs-arm64.cc @@ -53,9 +53,10 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate, CHECK(buffer); HandleScope handles(isolate); MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size)); - DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath); + DoubleToIStub stub(isolate, source_reg, destination_reg, 0, true, + inline_fastpath); - byte* start = stub.GetCode(isolate)->instruction_start(); + byte* start = stub.GetCode()->instruction_start(); Label done; __ SetStackPointer(csp); diff --git a/deps/v8/test/cctest/test-code-stubs-ia32.cc b/deps/v8/test/cctest/test-code-stubs-ia32.cc index c206a0102..96639577b 100644 --- a/deps/v8/test/cctest/test-code-stubs-ia32.cc +++ b/deps/v8/test/cctest/test-code-stubs-ia32.cc @@ -55,8 +55,8 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate, MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size)); int offset = source_reg.is(esp) ? 
0 : (HeapNumber::kValueOffset - kSmiTagSize); - DoubleToIStub stub(source_reg, destination_reg, offset, true); - byte* start = stub.GetCode(isolate)->instruction_start(); + DoubleToIStub stub(isolate, source_reg, destination_reg, offset, true); + byte* start = stub.GetCode()->instruction_start(); __ push(ebx); __ push(ecx); diff --git a/deps/v8/test/cctest/test-code-stubs-mips.cc b/deps/v8/test/cctest/test-code-stubs-mips.cc index 8dce89694..f88979678 100644 --- a/deps/v8/test/cctest/test-code-stubs-mips.cc +++ b/deps/v8/test/cctest/test-code-stubs-mips.cc @@ -54,9 +54,10 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate, CHECK(buffer); HandleScope handles(isolate); MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size)); - DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath); + DoubleToIStub stub(isolate, source_reg, destination_reg, 0, true, + inline_fastpath); - byte* start = stub.GetCode(isolate)->instruction_start(); + byte* start = stub.GetCode()->instruction_start(); Label done; // Save callee save registers. diff --git a/deps/v8/test/cctest/test-code-stubs-x64.cc b/deps/v8/test/cctest/test-code-stubs-x64.cc index 348b21aca..3ffd292c5 100644 --- a/deps/v8/test/cctest/test-code-stubs-x64.cc +++ b/deps/v8/test/cctest/test-code-stubs-x64.cc @@ -54,8 +54,8 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate, MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size)); int offset = source_reg.is(rsp) ? 0 : (HeapNumber::kValueOffset - kSmiTagSize); - DoubleToIStub stub(source_reg, destination_reg, offset, true); - byte* start = stub.GetCode(isolate)->instruction_start(); + DoubleToIStub stub(isolate, source_reg, destination_reg, offset, true); + byte* start = stub.GetCode()->instruction_start(); __ pushq(rbx); __ pushq(rcx); diff --git a/deps/v8/test/cctest/test-compiler.cc b/deps/v8/test/cctest/test-compiler.cc index 6540c5d28..9974ff570 100644 --- a/deps/v8/test/cctest/test-compiler.cc +++ b/deps/v8/test/cctest/test-compiler.cc @@ -36,11 +36,10 @@ using namespace v8::internal; -static MaybeObject* GetGlobalProperty(const char* name) { +static Handle<Object> GetGlobalProperty(const char* name) { Isolate* isolate = CcTest::i_isolate(); - Handle<String> internalized_name = - isolate->factory()->InternalizeUtf8String(name); - return isolate->context()->global_object()->GetProperty(*internalized_name); + return Object::GetProperty( + isolate, isolate->global_object(), name).ToHandleChecked(); } @@ -51,14 +50,14 @@ static void SetGlobalProperty(const char* name, Object* value) { isolate->factory()->InternalizeUtf8String(name); Handle<JSObject> global(isolate->context()->global_object()); Runtime::SetObjectProperty(isolate, global, internalized_name, object, NONE, - SLOPPY); + SLOPPY).Check(); } static Handle<JSFunction> Compile(const char* source) { Isolate* isolate = CcTest::i_isolate(); - Handle<String> source_code( - isolate->factory()->NewStringFromUtf8(CStrVector(source))); + Handle<String> source_code = isolate->factory()->NewStringFromUtf8( + CStrVector(source)).ToHandleChecked(); Handle<SharedFunctionInfo> shared_function = Compiler::CompileScript(source_code, Handle<String>(), @@ -81,11 +80,9 @@ static double Inc(Isolate* isolate, int x) { Handle<JSFunction> fun = Compile(buffer.start()); if (fun.is_null()) return -1; - bool has_pending_exception; Handle<JSObject> global(isolate->context()->global_object()); - Execution::Call(isolate, fun, global, 0, NULL, &has_pending_exception); - CHECK(!has_pending_exception); 
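// The test-code-stubs-* hunks above all follow the same CodeStub API change:
// the isolate is now passed to the stub's constructor, and GetCode() no
// longer takes it as an argument. A minimal sketch of the new shape, reusing
// the register and flag parameters of the MakeConvertDToIFuncTrampoline
// helpers above (illustrative, not part of the patch):
DoubleToIStub stub(isolate, source_reg, destination_reg, 0, true,
                   inline_fastpath);
byte* start = stub.GetCode()->instruction_start();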
- return GetGlobalProperty("result")->ToObjectChecked()->Number(); + Execution::Call(isolate, fun, global, 0, NULL).Check(); + return GetGlobalProperty("result")->Number(); } @@ -102,11 +99,9 @@ static double Add(Isolate* isolate, int x, int y) { SetGlobalProperty("x", Smi::FromInt(x)); SetGlobalProperty("y", Smi::FromInt(y)); - bool has_pending_exception; Handle<JSObject> global(isolate->context()->global_object()); - Execution::Call(isolate, fun, global, 0, NULL, &has_pending_exception); - CHECK(!has_pending_exception); - return GetGlobalProperty("result")->ToObjectChecked()->Number(); + Execution::Call(isolate, fun, global, 0, NULL).Check(); + return GetGlobalProperty("result")->Number(); } @@ -122,11 +117,9 @@ static double Abs(Isolate* isolate, int x) { if (fun.is_null()) return -1; SetGlobalProperty("x", Smi::FromInt(x)); - bool has_pending_exception; Handle<JSObject> global(isolate->context()->global_object()); - Execution::Call(isolate, fun, global, 0, NULL, &has_pending_exception); - CHECK(!has_pending_exception); - return GetGlobalProperty("result")->ToObjectChecked()->Number(); + Execution::Call(isolate, fun, global, 0, NULL).Check(); + return GetGlobalProperty("result")->Number(); } @@ -143,11 +136,9 @@ static double Sum(Isolate* isolate, int n) { if (fun.is_null()) return -1; SetGlobalProperty("n", Smi::FromInt(n)); - bool has_pending_exception; Handle<JSObject> global(isolate->context()->global_object()); - Execution::Call(isolate, fun, global, 0, NULL, &has_pending_exception); - CHECK(!has_pending_exception); - return GetGlobalProperty("result")->ToObjectChecked()->Number(); + Execution::Call(isolate, fun, global, 0, NULL).Check(); + return GetGlobalProperty("result")->Number(); } @@ -165,11 +156,8 @@ TEST(Print) { const char* source = "for (n = 0; n < 100; ++n) print(n, 1, 2);"; Handle<JSFunction> fun = Compile(source); if (fun.is_null()) return; - bool has_pending_exception; Handle<JSObject> global(CcTest::i_isolate()->context()->global_object()); - Execution::Call( - CcTest::i_isolate(), fun, global, 0, NULL, &has_pending_exception); - CHECK(!has_pending_exception); + Execution::Call(CcTest::i_isolate(), fun, global, 0, NULL).Check(); } @@ -199,12 +187,10 @@ TEST(Stuff) { Handle<JSFunction> fun = Compile(source); CHECK(!fun.is_null()); - bool has_pending_exception; Handle<JSObject> global(CcTest::i_isolate()->context()->global_object()); Execution::Call( - CcTest::i_isolate(), fun, global, 0, NULL, &has_pending_exception); - CHECK(!has_pending_exception); - CHECK_EQ(511.0, GetGlobalProperty("r")->ToObjectChecked()->Number()); + CcTest::i_isolate(), fun, global, 0, NULL).Check(); + CHECK_EQ(511.0, GetGlobalProperty("r")->Number()); } @@ -215,12 +201,10 @@ TEST(UncaughtThrow) { const char* source = "throw 42;"; Handle<JSFunction> fun = Compile(source); CHECK(!fun.is_null()); - bool has_pending_exception; Isolate* isolate = fun->GetIsolate(); Handle<JSObject> global(isolate->context()->global_object()); - Execution::Call(isolate, fun, global, 0, NULL, &has_pending_exception); - CHECK(has_pending_exception); - CHECK_EQ(42.0, isolate->pending_exception()->ToObjectChecked()->Number()); + CHECK(Execution::Call(isolate, fun, global, 0, NULL).is_null()); + CHECK_EQ(42.0, isolate->pending_exception()->Number()); } @@ -244,17 +228,13 @@ TEST(C2JSFrames) { Isolate* isolate = fun0->GetIsolate(); // Run the generated code to populate the global object with 'foo'. 
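// The test-compiler.cc hunks in this region follow the MaybeHandle migration:
// Execution::Call() no longer reports a pending exception through a
// bool* out-parameter. A minimal sketch of the two outcomes, reusing isolate,
// fun and global exactly as the tests above do (illustrative, not part of the
// patch):
// -- call that must succeed (e.g. TEST(Stuff)):
Execution::Call(isolate, fun, global, 0, NULL).Check();
// -- call that is expected to throw (TEST(UncaughtThrow)): the result is an
//    empty MaybeHandle and the exception stays pending on the isolate.
CHECK(Execution::Call(isolate, fun, global, 0, NULL).is_null());
CHECK_EQ(42.0, isolate->pending_exception()->Number());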
- bool has_pending_exception; Handle<JSObject> global(isolate->context()->global_object()); - Execution::Call( - isolate, fun0, global, 0, NULL, &has_pending_exception); - CHECK(!has_pending_exception); - - Object* foo_string = isolate->factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("foo"))->ToObjectChecked(); - MaybeObject* fun1_object = isolate->context()->global_object()-> - GetProperty(String::cast(foo_string)); - Handle<Object> fun1(fun1_object->ToObjectChecked(), isolate); + Execution::Call(isolate, fun0, global, 0, NULL).Check(); + + Handle<String> foo_string = isolate->factory()->InternalizeOneByteString( + STATIC_ASCII_VECTOR("foo")); + Handle<Object> fun1 = Object::GetProperty( + isolate->global_object(), foo_string).ToHandleChecked(); CHECK(fun1->IsJSFunction()); Handle<Object> argv[] = { isolate->factory()->InternalizeOneByteString( @@ -263,9 +243,7 @@ TEST(C2JSFrames) { Handle<JSFunction>::cast(fun1), global, ARRAY_SIZE(argv), - argv, - &has_pending_exception); - CHECK(!has_pending_exception); + argv).Check(); } @@ -279,9 +257,9 @@ TEST(Regression236) { Handle<Script> script = factory->NewScript(factory->empty_string()); script->set_source(CcTest::heap()->undefined_value()); - CHECK_EQ(-1, GetScriptLineNumber(script, 0)); - CHECK_EQ(-1, GetScriptLineNumber(script, 100)); - CHECK_EQ(-1, GetScriptLineNumber(script, -1)); + CHECK_EQ(-1, Script::GetLineNumber(script, 0)); + CHECK_EQ(-1, Script::GetLineNumber(script, 100)); + CHECK_EQ(-1, Script::GetLineNumber(script, -1)); } @@ -312,6 +290,78 @@ TEST(GetScriptLineNumber) { } +TEST(FeedbackVectorPreservedAcrossRecompiles) { + if (i::FLAG_always_opt || !i::FLAG_crankshaft) return; + i::FLAG_allow_natives_syntax = true; + CcTest::InitializeVM(); + if (!CcTest::i_isolate()->use_crankshaft()) return; + v8::HandleScope scope(CcTest::isolate()); + + // Make sure function f has a call that uses a type feedback slot. + CompileRun("function fun() {};" + "fun1 = fun;" + "function f(a) { a(); } f(fun1);"); + + Handle<JSFunction> f = + v8::Utils::OpenHandle( + *v8::Handle<v8::Function>::Cast( + CcTest::global()->Get(v8_str("f")))); + + // We shouldn't have deoptimization support. We want to recompile and + // verify that our feedback vector preserves information. + CHECK(!f->shared()->has_deoptimization_support()); + Handle<FixedArray> feedback_vector(f->shared()->feedback_vector()); + + // Verify that we gathered feedback. + CHECK_EQ(1, feedback_vector->length()); + CHECK(feedback_vector->get(0)->IsJSFunction()); + + CompileRun("%OptimizeFunctionOnNextCall(f); f(fun1);"); + + // Verify that the feedback is still "gathered" despite a recompilation + // of the full code. + CHECK(f->IsOptimized()); + CHECK(f->shared()->has_deoptimization_support()); + CHECK(f->shared()->feedback_vector()->get(0)->IsJSFunction()); +} + + +TEST(FeedbackVectorUnaffectedByScopeChanges) { + if (i::FLAG_always_opt || !i::FLAG_lazy) return; + CcTest::InitializeVM(); + v8::HandleScope scope(CcTest::isolate()); + + CompileRun("function builder() {" + " call_target = function() { return 3; };" + " return (function() {" + " eval('');" + " return function() {" + " 'use strict';" + " call_target();" + " }" + " })();" + "}" + "morphing_call = builder();"); + + Handle<JSFunction> f = + v8::Utils::OpenHandle( + *v8::Handle<v8::Function>::Cast( + CcTest::global()->Get(v8_str("morphing_call")))); + + // morphing_call should have one feedback vector slot for the call to + // call_target(). 
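// Both new feedback-vector tests reach from the public API into the internal
// JSFunction via v8::Utils::OpenHandle before inspecting
// shared()->feedback_vector(). A minimal sketch of that bridge, assuming a
// global JS function named "f" as in
// TEST(FeedbackVectorPreservedAcrossRecompiles) (illustrative, not part of
// the patch):
Handle<JSFunction> f =
    v8::Utils::OpenHandle(
        *v8::Handle<v8::Function>::Cast(
            CcTest::global()->Get(v8_str("f"))));
CHECK_EQ(1, f->shared()->feedback_vector()->length());
CHECK(f->shared()->feedback_vector()->get(0)->IsJSFunction());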
+ CHECK_EQ(1, f->shared()->feedback_vector()->length()); + // And yet it's not compiled. + CHECK(!f->shared()->is_compiled()); + + CompileRun("morphing_call();"); + + // The vector should have the same size despite the new scoping. + CHECK_EQ(1, f->shared()->feedback_vector()->length()); + CHECK(f->shared()->is_compiled()); +} + + // Test that optimized code for different closures is actually shared // immediately by the FastNewClosureStub when run in the same context. TEST(OptimizedCodeSharing) { diff --git a/deps/v8/test/cctest/test-cpu-profiler.cc b/deps/v8/test/cctest/test-cpu-profiler.cc index ed0b190f9..6cff7424b 100644 --- a/deps/v8/test/cctest/test-cpu-profiler.cc +++ b/deps/v8/test/cctest/test-cpu-profiler.cc @@ -148,8 +148,7 @@ TEST(CodeEvents) { // Enqueue code creation events. const char* aaa_str = "aaa"; - i::Handle<i::String> aaa_name = factory->NewStringFromAscii( - i::Vector<const char>(aaa_str, i::StrLength(aaa_str))); + i::Handle<i::String> aaa_name = factory->NewStringFromAsciiChecked(aaa_str); profiler.CodeCreateEvent(i::Logger::FUNCTION_TAG, aaa_code, *aaa_name); profiler.CodeCreateEvent(i::Logger::BUILTIN_TAG, comment_code, "comment"); profiler.CodeCreateEvent(i::Logger::STUB_TAG, args5_code, 5); @@ -355,33 +354,33 @@ TEST(DeleteCpuProfile) { CHECK_EQ(0, iprofiler->GetProfilesCount()); v8::Local<v8::String> name1 = v8::String::NewFromUtf8(env->GetIsolate(), "1"); - cpu_profiler->StartCpuProfiling(name1); - const v8::CpuProfile* p1 = cpu_profiler->StopCpuProfiling(name1); + cpu_profiler->StartProfiling(name1); + v8::CpuProfile* p1 = cpu_profiler->StopProfiling(name1); CHECK_NE(NULL, p1); CHECK_EQ(1, iprofiler->GetProfilesCount()); CHECK(FindCpuProfile(cpu_profiler, p1)); - const_cast<v8::CpuProfile*>(p1)->Delete(); + p1->Delete(); CHECK_EQ(0, iprofiler->GetProfilesCount()); v8::Local<v8::String> name2 = v8::String::NewFromUtf8(env->GetIsolate(), "2"); - cpu_profiler->StartCpuProfiling(name2); - const v8::CpuProfile* p2 = cpu_profiler->StopCpuProfiling(name2); + cpu_profiler->StartProfiling(name2); + v8::CpuProfile* p2 = cpu_profiler->StopProfiling(name2); CHECK_NE(NULL, p2); CHECK_EQ(1, iprofiler->GetProfilesCount()); CHECK(FindCpuProfile(cpu_profiler, p2)); v8::Local<v8::String> name3 = v8::String::NewFromUtf8(env->GetIsolate(), "3"); - cpu_profiler->StartCpuProfiling(name3); - const v8::CpuProfile* p3 = cpu_profiler->StopCpuProfiling(name3); + cpu_profiler->StartProfiling(name3); + v8::CpuProfile* p3 = cpu_profiler->StopProfiling(name3); CHECK_NE(NULL, p3); CHECK_EQ(2, iprofiler->GetProfilesCount()); CHECK_NE(p2, p3); CHECK(FindCpuProfile(cpu_profiler, p3)); CHECK(FindCpuProfile(cpu_profiler, p2)); - const_cast<v8::CpuProfile*>(p2)->Delete(); + p2->Delete(); CHECK_EQ(1, iprofiler->GetProfilesCount()); CHECK(!FindCpuProfile(cpu_profiler, p2)); CHECK(FindCpuProfile(cpu_profiler, p3)); - const_cast<v8::CpuProfile*>(p3)->Delete(); + p3->Delete(); CHECK_EQ(0, iprofiler->GetProfilesCount()); } @@ -393,21 +392,21 @@ TEST(ProfileStartEndTime) { v8::Local<v8::String> profile_name = v8::String::NewFromUtf8(env->GetIsolate(), "test"); - cpu_profiler->StartCpuProfiling(profile_name); - const v8::CpuProfile* profile = cpu_profiler->StopCpuProfiling(profile_name); + cpu_profiler->StartProfiling(profile_name); + const v8::CpuProfile* profile = cpu_profiler->StopProfiling(profile_name); CHECK(profile->GetStartTime() <= profile->GetEndTime()); } -static const v8::CpuProfile* RunProfiler( +static v8::CpuProfile* RunProfiler( v8::Handle<v8::Context> env, v8::Handle<v8::Function> 
function, v8::Handle<v8::Value> argv[], int argc, - unsigned min_js_samples) { + unsigned min_js_samples, bool collect_samples = false) { v8::CpuProfiler* cpu_profiler = env->GetIsolate()->GetCpuProfiler(); v8::Local<v8::String> profile_name = v8::String::NewFromUtf8(env->GetIsolate(), "my_profile"); - cpu_profiler->StartCpuProfiling(profile_name); + cpu_profiler->StartProfiling(profile_name, collect_samples); i::Sampler* sampler = reinterpret_cast<i::Isolate*>(env->GetIsolate())->logger()->sampler(); @@ -416,12 +415,11 @@ static const v8::CpuProfile* RunProfiler( function->Call(env->Global(), argc, argv); } while (sampler->js_and_external_sample_count() < min_js_samples); - const v8::CpuProfile* profile = cpu_profiler->StopCpuProfiling(profile_name); + v8::CpuProfile* profile = cpu_profiler->StopProfiling(profile_name); CHECK_NE(NULL, profile); // Dump collected profile to have a better diagnostic in case of failure. - reinterpret_cast<i::CpuProfile*>( - const_cast<v8::CpuProfile*>(profile))->Print(); + reinterpret_cast<i::CpuProfile*>(profile)->Print(); return profile; } @@ -553,7 +551,7 @@ TEST(CollectCpuProfile) { v8::Handle<v8::Value> args[] = { v8::Integer::New(env->GetIsolate(), profiling_interval_ms) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env.local(), function, args, ARRAY_SIZE(args), 200); function->Call(env->Global(), ARRAY_SIZE(args), args); @@ -585,10 +583,41 @@ TEST(CollectCpuProfile) { CheckSimpleBranch(env->GetIsolate(), fooNode, delayBranch, ARRAY_SIZE(delayBranch)); - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } +TEST(CollectCpuProfileSamples) { + LocalContext env; + v8::HandleScope scope(env->GetIsolate()); + + v8::Script::Compile(v8::String::NewFromUtf8(env->GetIsolate(), + cpu_profiler_test_source))->Run(); + v8::Local<v8::Function> function = v8::Local<v8::Function>::Cast( + env->Global()->Get(v8::String::NewFromUtf8(env->GetIsolate(), "start"))); + + int32_t profiling_interval_ms = 200; + v8::Handle<v8::Value> args[] = { + v8::Integer::New(env->GetIsolate(), profiling_interval_ms) + }; + v8::CpuProfile* profile = + RunProfiler(env.local(), function, args, ARRAY_SIZE(args), 200, true); + + CHECK_LE(200, profile->GetSamplesCount()); + uint64_t end_time = profile->GetEndTime(); + uint64_t current_time = profile->GetStartTime(); + CHECK_LE(current_time, end_time); + for (int i = 0; i < profile->GetSamplesCount(); i++) { + CHECK_NE(NULL, profile->GetSample(i)); + uint64_t timestamp = profile->GetSampleTimestamp(i); + CHECK_LE(current_time, timestamp); + CHECK_LE(timestamp, end_time); + current_time = timestamp; + } + + profile->Delete(); +} + static const char* cpu_profiler_test_source2 = "function loop() {}\n" "function delay() { loop(); }\n" @@ -627,7 +656,7 @@ TEST(SampleWhenFrameIsNotSetup) { v8::Handle<v8::Value> args[] = { v8::Integer::New(env->GetIsolate(), repeat_count) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env.local(), function, args, ARRAY_SIZE(args), 100); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); @@ -654,7 +683,7 @@ TEST(SampleWhenFrameIsNotSetup) { } } - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -747,7 +776,7 @@ TEST(NativeAccessorUninitializedIC) { int32_t repeat_count = 1; v8::Handle<v8::Value> args[] = { v8::Integer::New(isolate, repeat_count) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env.local(), function, args, ARRAY_SIZE(args), 180); const v8::CpuProfileNode* root = 
profile->GetTopDownRoot(); @@ -756,7 +785,7 @@ TEST(NativeAccessorUninitializedIC) { GetChild(isolate, startNode, "get foo"); GetChild(isolate, startNode, "set foo"); - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -804,7 +833,7 @@ TEST(NativeAccessorMonomorphicIC) { int32_t repeat_count = 100; v8::Handle<v8::Value> args[] = { v8::Integer::New(isolate, repeat_count) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env.local(), function, args, ARRAY_SIZE(args), 200); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); @@ -813,7 +842,7 @@ TEST(NativeAccessorMonomorphicIC) { GetChild(isolate, startNode, "get foo"); GetChild(isolate, startNode, "set foo"); - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -858,7 +887,7 @@ TEST(NativeMethodUninitializedIC) { int32_t repeat_count = 1; v8::Handle<v8::Value> args[] = { v8::Integer::New(isolate, repeat_count) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env.local(), function, args, ARRAY_SIZE(args), 100); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); @@ -866,7 +895,7 @@ TEST(NativeMethodUninitializedIC) { GetChild(isolate, root, "start"); GetChild(isolate, startNode, "fooMethod"); - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -915,7 +944,7 @@ TEST(NativeMethodMonomorphicIC) { int32_t repeat_count = 100; v8::Handle<v8::Value> args[] = { v8::Integer::New(isolate, repeat_count) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env.local(), function, args, ARRAY_SIZE(args), 100); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); @@ -924,7 +953,7 @@ TEST(NativeMethodMonomorphicIC) { GetChild(isolate, root, "start"); GetChild(isolate, startNode, "fooMethod"); - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -956,7 +985,7 @@ TEST(BoundFunctionCall) { v8::Handle<v8::Value> args[] = { v8::Integer::New(env->GetIsolate(), duration_ms) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env.local(), function, args, ARRAY_SIZE(args), 100); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); @@ -973,7 +1002,7 @@ TEST(BoundFunctionCall) { GetChild(env->GetIsolate(), root, "start"); GetChild(env->GetIsolate(), startNode, "foo"); - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -1017,7 +1046,7 @@ TEST(FunctionCallSample) { v8::Handle<v8::Value> args[] = { v8::Integer::New(env->GetIsolate(), duration_ms) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env.local(), function, args, ARRAY_SIZE(args), 100); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); @@ -1056,7 +1085,7 @@ TEST(FunctionCallSample) { CheckChildrenNames(unresolvedNode, names); } - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -1100,7 +1129,7 @@ TEST(FunctionApplySample) { v8::Integer::New(env->GetIsolate(), duration_ms) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env.local(), function, args, ARRAY_SIZE(args), 100); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); @@ -1145,7 +1174,7 @@ TEST(FunctionApplySample) { } } - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -1207,7 +1236,7 @@ TEST(JsNativeJsSample) { v8::Handle<v8::Value> args[] = { v8::Integer::New(env->GetIsolate(), duration_ms) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env, function, 
args, ARRAY_SIZE(args), 10); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); @@ -1234,7 +1263,7 @@ TEST(JsNativeJsSample) { CHECK_EQ(1, barNode->GetChildrenCount()); GetChild(env->GetIsolate(), barNode, "foo"); - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -1292,7 +1321,7 @@ TEST(JsNativeJsRuntimeJsSample) { v8::Handle<v8::Value> args[] = { v8::Integer::New(env->GetIsolate(), duration_ms) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env, function, args, ARRAY_SIZE(args), 10); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); @@ -1317,7 +1346,7 @@ TEST(JsNativeJsRuntimeJsSample) { CHECK_EQ(1, barNode->GetChildrenCount()); GetChild(env->GetIsolate(), barNode, "foo"); - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -1386,7 +1415,7 @@ TEST(JsNative1JsNative2JsSample) { v8::Handle<v8::Value> args[] = { v8::Integer::New(env->GetIsolate(), duration_ms) }; - const v8::CpuProfile* profile = + v8::CpuProfile* profile = RunProfiler(env, function, args, ARRAY_SIZE(args), 10); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); @@ -1415,7 +1444,7 @@ TEST(JsNative1JsNative2JsSample) { CHECK_EQ(1, nativeNode2->GetChildrenCount()); GetChild(env->GetIsolate(), nativeNode2, "foo"); - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -1430,7 +1459,7 @@ TEST(IdleTime) { v8::Local<v8::String> profile_name = v8::String::NewFromUtf8(env->GetIsolate(), "my_profile"); - cpu_profiler->StartCpuProfiling(profile_name); + cpu_profiler->StartProfiling(profile_name); i::Isolate* isolate = CcTest::i_isolate(); i::ProfilerEventsProcessor* processor = isolate->cpu_profiler()->processor(); @@ -1446,11 +1475,10 @@ TEST(IdleTime) { processor->AddCurrentStack(isolate); - const v8::CpuProfile* profile = cpu_profiler->StopCpuProfiling(profile_name); + v8::CpuProfile* profile = cpu_profiler->StopProfiling(profile_name); CHECK_NE(NULL, profile); // Dump collected profile to have a better diagnostic in case of failure. 
- reinterpret_cast<i::CpuProfile*>( - const_cast<v8::CpuProfile*>(profile))->Print(); + reinterpret_cast<i::CpuProfile*>(profile)->Print(); const v8::CpuProfileNode* root = profile->GetTopDownRoot(); ScopedVector<v8::Handle<v8::String> > names(3); @@ -1472,7 +1500,7 @@ TEST(IdleTime) { CHECK_EQ(0, idleNode->GetChildrenCount()); CHECK_GE(idleNode->GetHitCount(), 3); - const_cast<v8::CpuProfile*>(profile)->Delete(); + profile->Delete(); } @@ -1545,24 +1573,24 @@ TEST(DontStopOnFinishedProfileDelete) { CHECK_EQ(0, iprofiler->GetProfilesCount()); v8::Handle<v8::String> outer = v8::String::NewFromUtf8(isolate, "outer"); - profiler->StartCpuProfiling(outer); + profiler->StartProfiling(outer); CHECK_EQ(0, iprofiler->GetProfilesCount()); v8::Handle<v8::String> inner = v8::String::NewFromUtf8(isolate, "inner"); - profiler->StartCpuProfiling(inner); + profiler->StartProfiling(inner); CHECK_EQ(0, iprofiler->GetProfilesCount()); - const v8::CpuProfile* inner_profile = profiler->StopCpuProfiling(inner); + v8::CpuProfile* inner_profile = profiler->StopProfiling(inner); CHECK(inner_profile); CHECK_EQ(1, iprofiler->GetProfilesCount()); - const_cast<v8::CpuProfile*>(inner_profile)->Delete(); + inner_profile->Delete(); inner_profile = NULL; CHECK_EQ(0, iprofiler->GetProfilesCount()); - const v8::CpuProfile* outer_profile = profiler->StopCpuProfiling(outer); + v8::CpuProfile* outer_profile = profiler->StopProfiling(outer); CHECK(outer_profile); CHECK_EQ(1, iprofiler->GetProfilesCount()); - const_cast<v8::CpuProfile*>(outer_profile)->Delete(); + outer_profile->Delete(); outer_profile = NULL; CHECK_EQ(0, iprofiler->GetProfilesCount()); } diff --git a/deps/v8/test/cctest/test-debug.cc b/deps/v8/test/cctest/test-debug.cc index b51cb7724..85e4512ea 100644 --- a/deps/v8/test/cctest/test-debug.cc +++ b/deps/v8/test/cctest/test-debug.cc @@ -25,8 +25,6 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-#ifdef ENABLE_DEBUGGER_SUPPORT - #include <stdlib.h> #include "v8.h" @@ -114,7 +112,7 @@ class DebugLocalContext { v8::internal::Runtime::SetObjectProperty(isolate, global, debug_string, Handle<Object>(debug->debug_context()->global_proxy(), isolate), DONT_ENUM, - ::v8::internal::SLOPPY); + ::v8::internal::SLOPPY).Check(); } private: @@ -5409,8 +5407,6 @@ void BreakpointsDebuggerThread::Run() { void TestRecursiveBreakpointsGeneric(bool global_evaluate) { - i::FLAG_debugger_auto_break = true; - BreakpointsDebuggerThread breakpoints_debugger_thread(global_evaluate); BreakpointsV8Thread breakpoints_v8_thread; @@ -5881,7 +5877,6 @@ void HostDispatchDebuggerThread::Run() { TEST(DebuggerHostDispatch) { HostDispatchDebuggerThread host_dispatch_debugger_thread; HostDispatchV8Thread host_dispatch_v8_thread; - i::FLAG_debugger_auto_break = true; // Create a V8 environment Barriers stack_allocated_host_dispatch_barriers; @@ -5948,8 +5943,6 @@ TEST(DebuggerDebugMessageDispatch) { DebugMessageDispatchDebuggerThread debug_message_dispatch_debugger_thread; DebugMessageDispatchV8Thread debug_message_dispatch_v8_thread; - i::FLAG_debugger_auto_break = true; - // Create a V8 environment Barriers stack_allocated_debug_message_dispatch_barriers; debug_message_dispatch_barriers = @@ -7515,7 +7508,7 @@ static void DebugBreakInlineListener( OS::SNPrintF(script_vector, "%%GetFrameDetails(%d, %d)[5]", break_id, i); v8::Local<v8::Value> result = CompileRun(script); CHECK_EQ(expected_line_number[i], - i::GetScriptLineNumber(source_script, result->Int32Value())); + i::Script::GetLineNumber(source_script, result->Int32Value())); } v8::Debug::SetDebugEventListener2(NULL); v8::V8::TerminateExecution(CcTest::isolate()); @@ -7668,4 +7661,32 @@ TEST(PrecompiledFunction) { } -#endif // ENABLE_DEBUGGER_SUPPORT +static void DebugBreakStackTraceListener( + const v8::Debug::EventDetails& event_details) { + v8::StackTrace::CurrentStackTrace(CcTest::isolate(), 10); +} + + +static void AddDebugBreak(const v8::FunctionCallbackInfo<v8::Value>& args) { + v8::Debug::DebugBreak(args.GetIsolate()); +} + + +TEST(DebugBreakStackTrace) { + DebugLocalContext env; + v8::HandleScope scope(env->GetIsolate()); + v8::Debug::SetDebugEventListener2(DebugBreakStackTraceListener); + v8::Handle<v8::FunctionTemplate> add_debug_break_template = + v8::FunctionTemplate::New(env->GetIsolate(), AddDebugBreak); + v8::Handle<v8::Function> add_debug_break = + add_debug_break_template->GetFunction(); + env->Global()->Set(v8_str("add_debug_break"), add_debug_break); + + CompileRun("(function loop() {" + " for (var j = 0; j < 1000; j++) {" + " for (var i = 0; i < 1000; i++) {" + " if (i == 999) add_debug_break();" + " }" + " }" + "})()"); +} diff --git a/deps/v8/test/cctest/test-dictionary.cc b/deps/v8/test/cctest/test-dictionary.cc index 6e62a2243..aa1bc8623 100644 --- a/deps/v8/test/cctest/test-dictionary.cc +++ b/deps/v8/test/cctest/test-dictionary.cc @@ -38,46 +38,46 @@ using namespace v8::internal; +namespace { -TEST(ObjectHashTable) { - LocalContext context; + +template<typename HashMap> +static void TestHashMap(Handle<HashMap> table) { Isolate* isolate = CcTest::i_isolate(); Factory* factory = isolate->factory(); - v8::HandleScope scope(context->GetIsolate()); - Handle<ObjectHashTable> table = factory->NewObjectHashTable(23); + Handle<JSObject> a = factory->NewJSArray(7); Handle<JSObject> b = factory->NewJSArray(11); - table = ObjectHashTable::Put(table, a, b); + table = HashMap::Put(table, a, b); CHECK_EQ(table->NumberOfElements(), 1); - 
CHECK_EQ(table->Lookup(*a), *b); - CHECK_EQ(table->Lookup(*b), CcTest::heap()->the_hole_value()); + CHECK_EQ(table->Lookup(a), *b); + CHECK_EQ(table->Lookup(b), CcTest::heap()->the_hole_value()); // Keys still have to be valid after objects were moved. CcTest::heap()->CollectGarbage(NEW_SPACE); CHECK_EQ(table->NumberOfElements(), 1); - CHECK_EQ(table->Lookup(*a), *b); - CHECK_EQ(table->Lookup(*b), CcTest::heap()->the_hole_value()); + CHECK_EQ(table->Lookup(a), *b); + CHECK_EQ(table->Lookup(b), CcTest::heap()->the_hole_value()); // Keys that are overwritten should not change number of elements. - table = ObjectHashTable::Put(table, a, factory->NewJSArray(13)); + table = HashMap::Put(table, a, factory->NewJSArray(13)); CHECK_EQ(table->NumberOfElements(), 1); - CHECK_NE(table->Lookup(*a), *b); + CHECK_NE(table->Lookup(a), *b); // Keys mapped to the hole should be removed permanently. - table = ObjectHashTable::Put(table, a, factory->the_hole_value()); + table = HashMap::Put(table, a, factory->the_hole_value()); CHECK_EQ(table->NumberOfElements(), 0); - CHECK_EQ(table->NumberOfDeletedElements(), 1); - CHECK_EQ(table->Lookup(*a), CcTest::heap()->the_hole_value()); + CHECK_EQ(table->Lookup(a), CcTest::heap()->the_hole_value()); // Keys should map back to their respective values and also should get // an identity hash code generated. for (int i = 0; i < 100; i++) { Handle<JSReceiver> key = factory->NewJSArray(7); Handle<JSObject> value = factory->NewJSArray(11); - table = ObjectHashTable::Put(table, key, value); + table = HashMap::Put(table, key, value); CHECK_EQ(table->NumberOfElements(), i + 1); - CHECK_NE(table->FindEntry(*key), ObjectHashTable::kNotFound); - CHECK_EQ(table->Lookup(*key), *value); + CHECK_NE(table->FindEntry(key), HashMap::kNotFound); + CHECK_EQ(table->Lookup(key), *value); CHECK(key->GetIdentityHash()->IsSmi()); } @@ -86,8 +86,8 @@ TEST(ObjectHashTable) { for (int i = 0; i < 100; i++) { Handle<JSReceiver> key = factory->NewJSArray(7); CHECK(JSReceiver::GetOrCreateIdentityHash(key)->IsSmi()); - CHECK_EQ(table->FindEntry(*key), ObjectHashTable::kNotFound); - CHECK_EQ(table->Lookup(*key), CcTest::heap()->the_hole_value()); + CHECK_EQ(table->FindEntry(key), HashMap::kNotFound); + CHECK_EQ(table->Lookup(key), CcTest::heap()->the_hole_value()); CHECK(key->GetIdentityHash()->IsSmi()); } @@ -95,13 +95,22 @@ TEST(ObjectHashTable) { // should not get an identity hash code generated. 
for (int i = 0; i < 100; i++) { Handle<JSReceiver> key = factory->NewJSArray(7); - CHECK_EQ(table->Lookup(*key), CcTest::heap()->the_hole_value()); + CHECK_EQ(table->Lookup(key), CcTest::heap()->the_hole_value()); CHECK_EQ(key->GetIdentityHash(), CcTest::heap()->undefined_value()); } } +TEST(HashMap) { + LocalContext context; + v8::HandleScope scope(context->GetIsolate()); + Isolate* isolate = CcTest::i_isolate(); + TestHashMap(ObjectHashTable::New(isolate, 23)); + TestHashMap(isolate->factory()->NewOrderedHashMap()); +} + + class ObjectHashTableTest: public ObjectHashTable { public: void insert(int entry, int key, int value) { @@ -110,7 +119,8 @@ class ObjectHashTableTest: public ObjectHashTable { } int lookup(int key) { - return Smi::cast(Lookup(Smi::FromInt(key)))->value(); + Handle<Object> key_obj(Smi::FromInt(key), GetIsolate()); + return Smi::cast(Lookup(key_obj))->value(); } int capacity() { @@ -122,30 +132,29 @@ class ObjectHashTableTest: public ObjectHashTable { TEST(HashTableRehash) { LocalContext context; Isolate* isolate = CcTest::i_isolate(); - Factory* factory = isolate->factory(); v8::HandleScope scope(context->GetIsolate()); // Test almost filled table. { - Handle<ObjectHashTable> table = factory->NewObjectHashTable(100); + Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 100); ObjectHashTableTest* t = reinterpret_cast<ObjectHashTableTest*>(*table); int capacity = t->capacity(); for (int i = 0; i < capacity - 1; i++) { t->insert(i, i * i, i); } - t->Rehash(Smi::FromInt(0)); + t->Rehash(handle(Smi::FromInt(0), isolate)); for (int i = 0; i < capacity - 1; i++) { CHECK_EQ(i, t->lookup(i * i)); } } // Test half-filled table. { - Handle<ObjectHashTable> table = factory->NewObjectHashTable(100); + Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 100); ObjectHashTableTest* t = reinterpret_cast<ObjectHashTableTest*>(*table); int capacity = t->capacity(); for (int i = 0; i < capacity / 2; i++) { t->insert(i, i * i, i); } - t->Rehash(Smi::FromInt(0)); + t->Rehash(handle(Smi::FromInt(0), isolate)); for (int i = 0; i < capacity / 2; i++) { CHECK_EQ(i, t->lookup(i * i)); } @@ -154,13 +163,11 @@ TEST(HashTableRehash) { #ifdef DEBUG -TEST(ObjectHashSetCausesGC) { - i::FLAG_stress_compaction = false; - LocalContext context; +template<class HashSet> +static void TestHashSetCausesGC(Handle<HashSet> table) { Isolate* isolate = CcTest::i_isolate(); Factory* factory = isolate->factory(); - v8::HandleScope scope(context->GetIsolate()); - Handle<ObjectHashSet> table = factory->NewObjectHashSet(1); + Handle<JSObject> key = factory->NewJSArray(0); v8::Handle<v8::Object> key_obj = v8::Utils::ToLocal(key); @@ -176,28 +183,35 @@ TEST(ObjectHashSetCausesGC) { // Calling Contains() should not cause GC ever. int gc_count = isolate->heap()->gc_count(); - CHECK(!table->Contains(*key)); + CHECK(!table->Contains(key)); CHECK(gc_count == isolate->heap()->gc_count()); // Calling Remove() will not cause GC in this case. - table = ObjectHashSet::Remove(table, key); + table = HashSet::Remove(table, key); CHECK(gc_count == isolate->heap()->gc_count()); // Calling Add() should cause GC. 
- table = ObjectHashSet::Add(table, key); + table = HashSet::Add(table, key); CHECK(gc_count < isolate->heap()->gc_count()); } -#endif -#ifdef DEBUG -TEST(ObjectHashTableCausesGC) { +TEST(ObjectHashSetCausesGC) { i::FLAG_stress_compaction = false; LocalContext context; + v8::HandleScope scope(context->GetIsolate()); + Isolate* isolate = CcTest::i_isolate(); + TestHashSetCausesGC(isolate->factory()->NewOrderedHashSet()); +} +#endif + + +#ifdef DEBUG +template<class HashMap> +static void TestHashMapCausesGC(Handle<HashMap> table) { Isolate* isolate = CcTest::i_isolate(); Factory* factory = isolate->factory(); - v8::HandleScope scope(context->GetIsolate()); - Handle<ObjectHashTable> table = factory->NewObjectHashTable(1); + Handle<JSObject> key = factory->NewJSArray(0); v8::Handle<v8::Object> key_obj = v8::Utils::ToLocal(key); @@ -212,11 +226,24 @@ TEST(ObjectHashTableCausesGC) { SimulateFullSpace(CcTest::heap()->old_pointer_space()); // Calling Lookup() should not cause GC ever. - CHECK(table->Lookup(*key)->IsTheHole()); + CHECK(table->Lookup(key)->IsTheHole()); // Calling Put() should request GC by returning a failure. int gc_count = isolate->heap()->gc_count(); - ObjectHashTable::Put(table, key, key); + HashMap::Put(table, key, key); CHECK(gc_count < isolate->heap()->gc_count()); } + + +TEST(ObjectHashTableCausesGC) { + i::FLAG_stress_compaction = false; + LocalContext context; + v8::HandleScope scope(context->GetIsolate()); + Isolate* isolate = CcTest::i_isolate(); + TestHashMapCausesGC(ObjectHashTable::New(isolate, 1)); + TestHashMapCausesGC(isolate->factory()->NewOrderedHashMap()); +} #endif + + +} diff --git a/deps/v8/test/cctest/test-disasm-arm.cc b/deps/v8/test/cctest/test-disasm-arm.cc index 5eff4206c..24453bc88 100644 --- a/deps/v8/test/cctest/test-disasm-arm.cc +++ b/deps/v8/test/cctest/test-disasm-arm.cc @@ -592,8 +592,8 @@ TEST(Vfp) { "eeb80be0 vcvt.f64.s32 d0, s1"); COMPARE(vcvt_f32_s32(s0, s2), "eeb80ac1 vcvt.f32.s32 s0, s2"); - COMPARE(vcvt_f64_s32(d0, 1), - "eeba0bef vcvt.f64.s32 d0, d0, #1"); + COMPARE(vcvt_f64_s32(d0, 2), + "eeba0bcf vcvt.f64.s32 d0, d0, #2"); if (CpuFeatures::IsSupported(VFP32DREGS)) { COMPARE(vmov(d3, d27), diff --git a/deps/v8/test/cctest/test-disasm-arm64.cc b/deps/v8/test/cctest/test-disasm-arm64.cc index 3343175e9..23f7b6daf 100644 --- a/deps/v8/test/cctest/test-disasm-arm64.cc +++ b/deps/v8/test/cctest/test-disasm-arm64.cc @@ -1605,13 +1605,13 @@ TEST_(debug) { // All debug codes should produce the same instruction, and the debug code // can be any uint32_t. 
- COMPARE(debug("message", 0, NO_PARAM), "hlt #0xdeb0"); - COMPARE(debug("message", 1, NO_PARAM), "hlt #0xdeb0"); - COMPARE(debug("message", 0xffff, NO_PARAM), "hlt #0xdeb0"); - COMPARE(debug("message", 0x10000, NO_PARAM), "hlt #0xdeb0"); - COMPARE(debug("message", 0x7fffffff, NO_PARAM), "hlt #0xdeb0"); - COMPARE(debug("message", 0x80000000u, NO_PARAM), "hlt #0xdeb0"); - COMPARE(debug("message", 0xffffffffu, NO_PARAM), "hlt #0xdeb0"); + COMPARE(debug("message", 0, BREAK), "hlt #0xdeb0"); + COMPARE(debug("message", 1, BREAK), "hlt #0xdeb0"); + COMPARE(debug("message", 0xffff, BREAK), "hlt #0xdeb0"); + COMPARE(debug("message", 0x10000, BREAK), "hlt #0xdeb0"); + COMPARE(debug("message", 0x7fffffff, BREAK), "hlt #0xdeb0"); + COMPARE(debug("message", 0x80000000u, BREAK), "hlt #0xdeb0"); + COMPARE(debug("message", 0xffffffffu, BREAK), "hlt #0xdeb0"); CLEANUP(); } diff --git a/deps/v8/test/cctest/test-disasm-ia32.cc b/deps/v8/test/cctest/test-disasm-ia32.cc index 7ca95f6c9..6972aeaba 100644 --- a/deps/v8/test/cctest/test-disasm-ia32.cc +++ b/deps/v8/test/cctest/test-disasm-ia32.cc @@ -274,11 +274,9 @@ TEST(DisasmIa320) { __ jmp(&L1); __ jmp(Operand(ebx, ecx, times_4, 10000)); -#ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference after_break_target = ExternalReference(Debug_Address::AfterBreakTarget(), isolate); __ jmp(Operand::StaticVariable(after_break_target)); -#endif // ENABLE_DEBUGGER_SUPPORT __ jmp(ic, RelocInfo::CODE_TARGET); __ nop(); @@ -462,15 +460,13 @@ TEST(DisasmIa320) { CodeDesc desc; assm.GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + USE(code); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); - byte* begin = Code::cast(code)->instruction_start(); - byte* end = begin + Code::cast(code)->instruction_size(); + code->Print(); + byte* begin = code->instruction_start(); + byte* end = begin + code->instruction_size(); disasm::Disassembler::Disassemble(stdout, begin, end); #endif } diff --git a/deps/v8/test/cctest/test-disasm-x64.cc b/deps/v8/test/cctest/test-disasm-x64.cc index 5ca12b943..3b1f8af82 100644 --- a/deps/v8/test/cctest/test-disasm-x64.cc +++ b/deps/v8/test/cctest/test-disasm-x64.cc @@ -104,7 +104,9 @@ TEST(DisasmX64) { __ xorq(rdx, Immediate(3)); __ nop(); __ cpuid(); + __ movsxbl(rdx, Operand(rcx, 0)); __ movsxbq(rdx, Operand(rcx, 0)); + __ movsxwl(rdx, Operand(rcx, 0)); __ movsxwq(rdx, Operand(rcx, 0)); __ movzxbl(rdx, Operand(rcx, 0)); __ movzxwl(rdx, Operand(rcx, 0)); @@ -179,22 +181,22 @@ TEST(DisasmX64) { __ nop(); - __ rcl(rdx, Immediate(1)); - __ rcl(rdx, Immediate(7)); - __ rcr(rdx, Immediate(1)); - __ rcr(rdx, Immediate(7)); - __ sar(rdx, Immediate(1)); - __ sar(rdx, Immediate(6)); - __ sar_cl(rdx); + __ rclq(rdx, Immediate(1)); + __ rclq(rdx, Immediate(7)); + __ rcrq(rdx, Immediate(1)); + __ rcrq(rdx, Immediate(7)); + __ sarq(rdx, Immediate(1)); + __ sarq(rdx, Immediate(6)); + __ sarq_cl(rdx); __ sbbq(rdx, rbx); __ shld(rdx, rbx); - __ shl(rdx, Immediate(1)); - __ shl(rdx, Immediate(6)); - __ shl_cl(rdx); + __ shlq(rdx, Immediate(1)); + __ shlq(rdx, Immediate(6)); + __ shlq_cl(rdx); __ shrd(rdx, rbx); - __ shr(rdx, Immediate(1)); - __ shr(rdx, Immediate(7)); - __ shr_cl(rdx); + __ shrq(rdx, Immediate(1)); + __ shrq(rdx, Immediate(7)); + __ shrq_cl(rdx); // Immediates @@ -258,11 +260,9 @@ TEST(DisasmX64) { __ jmp(&L1); // TODO(mstarzinger): The 
following is protected. // __ jmp(Operand(rbx, rcx, times_4, 10000)); -#ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference after_break_target = ExternalReference(Debug_Address::AfterBreakTarget(), isolate); USE(after_break_target); -#endif // ENABLE_DEBUGGER_SUPPORT __ jmp(ic, RelocInfo::CODE_TARGET); __ nop(); @@ -429,15 +429,13 @@ TEST(DisasmX64) { CodeDesc desc; assm.GetCode(&desc); - Object* code = CcTest::heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + USE(code); #ifdef OBJECT_PRINT - Code::cast(code)->Print(); - byte* begin = Code::cast(code)->instruction_start(); - byte* end = begin + Code::cast(code)->instruction_size(); + code->Print(); + byte* begin = code->instruction_start(); + byte* end = begin + code->instruction_size(); disasm::Disassembler::Disassemble(stdout, begin, end); #endif } diff --git a/deps/v8/test/cctest/test-func-name-inference.cc b/deps/v8/test/cctest/test-func-name-inference.cc index d7c508305..f452b3ed3 100644 --- a/deps/v8/test/cctest/test-func-name-inference.cc +++ b/deps/v8/test/cctest/test-func-name-inference.cc @@ -70,14 +70,13 @@ static void CheckFunctionName(v8::Handle<v8::Script> script, // Find the position of a given func source substring in the source. Handle<String> func_pos_str = - factory->NewStringFromAscii(CStrVector(func_pos_src)); + factory->NewStringFromAsciiChecked(func_pos_src); int func_pos = Runtime::StringMatch(isolate, script_src, func_pos_str, 0); CHECK_NE(0, func_pos); -#ifdef ENABLE_DEBUGGER_SUPPORT // Obtain SharedFunctionInfo for the function. isolate->debug()->PrepareForBreakPoints(); Object* shared_func_info_ptr = @@ -90,7 +89,6 @@ static void CheckFunctionName(v8::Handle<v8::Script> script, SmartArrayPointer<char> inferred_name = shared_func_info->inferred_name()->ToCString(); CHECK_EQ(ref_inferred_name, inferred_name.get()); -#endif // ENABLE_DEBUGGER_SUPPORT } diff --git a/deps/v8/test/cctest/test-global-handles.cc b/deps/v8/test/cctest/test-global-handles.cc index 48b6655bb..1ab90ec5e 100644 --- a/deps/v8/test/cctest/test-global-handles.cc +++ b/deps/v8/test/cctest/test-global-handles.cc @@ -86,19 +86,19 @@ class TestObjectVisitor : public ObjectVisitor { TEST(IterateObjectGroupsOldApi) { CcTest::InitializeVM(); - GlobalHandles* global_handles = CcTest::i_isolate()->global_handles(); - Heap* heap = CcTest::heap(); + Isolate* isolate = CcTest::i_isolate(); + GlobalHandles* global_handles = isolate->global_handles(); v8::HandleScope handle_scope(CcTest::isolate()); Handle<Object> g1s1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g1s2 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g2s1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g2s2 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); TestRetainedObjectInfo info1; TestRetainedObjectInfo info2; @@ -181,20 +181,20 @@ TEST(IterateObjectGroupsOldApi) { TEST(IterateObjectGroups) { CcTest::InitializeVM(); - GlobalHandles* global_handles = CcTest::i_isolate()->global_handles(); - Heap* heap 
= CcTest::heap(); + Isolate* isolate = CcTest::i_isolate(); + GlobalHandles* global_handles = isolate->global_handles(); v8::HandleScope handle_scope(CcTest::isolate()); Handle<Object> g1s1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g1s2 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g2s1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g2s2 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); TestRetainedObjectInfo info1; TestRetainedObjectInfo info2; @@ -276,25 +276,25 @@ TEST(IterateObjectGroups) { TEST(ImplicitReferences) { CcTest::InitializeVM(); - GlobalHandles* global_handles = CcTest::i_isolate()->global_handles(); - Heap* heap = CcTest::heap(); + Isolate* isolate = CcTest::i_isolate(); + GlobalHandles* global_handles = isolate->global_handles(); v8::HandleScope handle_scope(CcTest::isolate()); Handle<Object> g1s1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g1c1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g1c2 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g2s1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g2s2 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); Handle<Object> g2c1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(*isolate->factory()->NewFixedArray(1)); global_handles->SetObjectGroupId(g1s1.location(), UniqueId(1)); global_handles->SetObjectGroupId(g2s1.location(), UniqueId(2)); diff --git a/deps/v8/test/cctest/test-hashing.cc b/deps/v8/test/cctest/test-hashing.cc index 66ee04158..9a7d61ddd 100644 --- a/deps/v8/test/cctest/test-hashing.cc +++ b/deps/v8/test/cctest/test-hashing.cc @@ -203,7 +203,8 @@ void check(i::Vector<const uint8_t> string) { CHECK(code->IsCode()); HASH_FUNCTION hash = FUNCTION_CAST<HASH_FUNCTION>(code->entry()); - Handle<String> v8_string = factory->NewStringFromOneByte(string); + Handle<String> v8_string = + factory->NewStringFromOneByte(string).ToHandleChecked(); v8_string->set_hash_field(String::kEmptyHashField); #ifdef USE_SIMULATOR uint32_t codegen_hash = static_cast<uint32_t>( diff --git a/deps/v8/test/cctest/test-heap-profiler.cc b/deps/v8/test/cctest/test-heap-profiler.cc index f1ccc571d..eeafc7093 100644 --- a/deps/v8/test/cctest/test-heap-profiler.cc +++ b/deps/v8/test/cctest/test-heap-profiler.cc @@ -441,11 +441,10 @@ TEST(HeapSnapshotConsString) { CHECK_EQ(1, global->InternalFieldCount()); i::Factory* factory = CcTest::i_isolate()->factory(); - i::Handle<i::String> first = - factory->NewStringFromAscii(i::CStrVector("0123456789")); - i::Handle<i::String> second = - factory->NewStringFromAscii(i::CStrVector("0123456789")); - i::Handle<i::String> 
cons_string = factory->NewConsString(first, second); + i::Handle<i::String> first = factory->NewStringFromStaticAscii("0123456789"); + i::Handle<i::String> second = factory->NewStringFromStaticAscii("0123456789"); + i::Handle<i::String> cons_string = + factory->NewConsString(first, second).ToHandleChecked(); global->SetInternalField(0, v8::ToApiHandle<v8::String>(cons_string)); @@ -1884,7 +1883,6 @@ TEST(SfiAndJsFunctionWeakRefs) { } -#ifdef ENABLE_DEBUGGER_SUPPORT TEST(NoDebugObjectInSnapshot) { LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -1909,7 +1907,6 @@ TEST(NoDebugObjectInSnapshot) { } CHECK_EQ(1, globals_count); } -#endif // ENABLE_DEBUGGER_SUPPORT TEST(AllStrongGcRootsHaveNames) { @@ -2487,8 +2484,7 @@ TEST(BoxObject) { v8::Handle<v8::Object> global = global_proxy->GetPrototype().As<v8::Object>(); i::Factory* factory = CcTest::i_isolate()->factory(); - i::Handle<i::String> string = - factory->NewStringFromAscii(i::CStrVector("string")); + i::Handle<i::String> string = factory->NewStringFromStaticAscii("string"); i::Handle<i::Object> box = factory->NewBox(string); global->Set(0, v8::ToApiHandle<v8::Object>(box)); @@ -2508,6 +2504,41 @@ TEST(BoxObject) { } +TEST(WeakContainers) { + i::FLAG_allow_natives_syntax = true; + LocalContext env; + v8::HandleScope scope(env->GetIsolate()); + if (!CcTest::i_isolate()->use_crankshaft()) return; + v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); + CompileRun( + "function foo(a) { return a.x; }\n" + "obj = {x : 123};\n" + "foo(obj);\n" + "foo(obj);\n" + "%OptimizeFunctionOnNextCall(foo);\n" + "foo(obj);\n"); + const v8::HeapSnapshot* snapshot = + heap_profiler->TakeHeapSnapshot(v8_str("snapshot")); + CHECK(ValidateSnapshot(snapshot)); + const v8::HeapGraphNode* global = GetGlobalObject(snapshot); + const v8::HeapGraphNode* obj = + GetProperty(global, v8::HeapGraphEdge::kProperty, "obj"); + CHECK_NE(NULL, obj); + const v8::HeapGraphNode* map = + GetProperty(obj, v8::HeapGraphEdge::kInternal, "map"); + CHECK_NE(NULL, map); + const v8::HeapGraphNode* dependent_code = + GetProperty(map, v8::HeapGraphEdge::kInternal, "dependent_code"); + if (!dependent_code) return; + int count = dependent_code->GetChildrenCount(); + CHECK_NE(0, count); + for (int i = 0; i < count; ++i) { + const v8::HeapGraphEdge* prop = dependent_code->GetChild(i); + CHECK_EQ(v8::HeapGraphEdge::kWeak, prop->GetType()); + } +} + + static inline i::Address ToAddress(int n) { return reinterpret_cast<i::Address>(n); } diff --git a/deps/v8/test/cctest/test-heap.cc b/deps/v8/test/cctest/test-heap.cc index c1f20f1f0..913d80a18 100644 --- a/deps/v8/test/cctest/test-heap.cc +++ b/deps/v8/test/cctest/test-heap.cc @@ -83,31 +83,27 @@ TEST(HeapMaps) { static void CheckOddball(Isolate* isolate, Object* obj, const char* string) { CHECK(obj->IsOddball()); - bool exc; Handle<Object> handle(obj, isolate); Object* print_string = - *Execution::ToString(isolate, handle, &exc); + *Execution::ToString(isolate, handle).ToHandleChecked(); CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string))); } static void CheckSmi(Isolate* isolate, int value, const char* string) { - bool exc; Handle<Object> handle(Smi::FromInt(value), isolate); Object* print_string = - *Execution::ToString(isolate, handle, &exc); + *Execution::ToString(isolate, handle).ToHandleChecked(); CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string))); } static void CheckNumber(Isolate* isolate, double value, const char* string) { - Object* obj = 
CcTest::heap()->NumberFromDouble(value)->ToObjectChecked(); - CHECK(obj->IsNumber()); - bool exc; - Handle<Object> handle(obj, isolate); - Object* print_string = - *Execution::ToString(isolate, handle, &exc); - CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string))); + Handle<Object> number = isolate->factory()->NewNumber(value); + CHECK(number->IsNumber()); + Handle<Object> print_string = + Execution::ToString(isolate, number).ToHandleChecked(); + CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string))); } @@ -121,30 +117,24 @@ static void CheckFindCodeObject(Isolate* isolate) { CodeDesc desc; assm.GetCode(&desc); - Heap* heap = isolate->heap(); - Object* code = heap->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); CHECK(code->IsCode()); - HeapObject* obj = HeapObject::cast(code); + HeapObject* obj = HeapObject::cast(*code); Address obj_addr = obj->address(); for (int i = 0; i < obj->Size(); i += kPointerSize) { Object* found = isolate->FindCodeObject(obj_addr + i); - CHECK_EQ(code, found); + CHECK_EQ(*code, found); } - Object* copy = heap->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(copy->IsCode()); - HeapObject* obj_copy = HeapObject::cast(copy); + Handle<Code> copy = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); + HeapObject* obj_copy = HeapObject::cast(*copy); Object* not_right = isolate->FindCodeObject(obj_copy->address() + obj_copy->Size() / 2); - CHECK(not_right != code); + CHECK(not_right != *code); } @@ -165,59 +155,56 @@ TEST(HeapObjects) { Heap* heap = isolate->heap(); HandleScope sc(isolate); - Object* value = heap->NumberFromDouble(1.000123)->ToObjectChecked(); + Handle<Object> value = factory->NewNumber(1.000123); CHECK(value->IsHeapNumber()); CHECK(value->IsNumber()); CHECK_EQ(1.000123, value->Number()); - value = heap->NumberFromDouble(1.0)->ToObjectChecked(); + value = factory->NewNumber(1.0); CHECK(value->IsSmi()); CHECK(value->IsNumber()); CHECK_EQ(1.0, value->Number()); - value = heap->NumberFromInt32(1024)->ToObjectChecked(); + value = factory->NewNumberFromInt(1024); CHECK(value->IsSmi()); CHECK(value->IsNumber()); CHECK_EQ(1024.0, value->Number()); - value = heap->NumberFromInt32(Smi::kMinValue)->ToObjectChecked(); + value = factory->NewNumberFromInt(Smi::kMinValue); CHECK(value->IsSmi()); CHECK(value->IsNumber()); - CHECK_EQ(Smi::kMinValue, Smi::cast(value)->value()); + CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value()); - value = heap->NumberFromInt32(Smi::kMaxValue)->ToObjectChecked(); + value = factory->NewNumberFromInt(Smi::kMaxValue); CHECK(value->IsSmi()); CHECK(value->IsNumber()); - CHECK_EQ(Smi::kMaxValue, Smi::cast(value)->value()); + CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value()); #if !defined(V8_TARGET_ARCH_X64) && !defined(V8_TARGET_ARCH_ARM64) // TODO(lrn): We need a NumberFromIntptr function in order to test this. 
- value = heap->NumberFromInt32(Smi::kMinValue - 1)->ToObjectChecked(); + value = factory->NewNumberFromInt(Smi::kMinValue - 1); CHECK(value->IsHeapNumber()); CHECK(value->IsNumber()); CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number()); #endif - MaybeObject* maybe_value = - heap->NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1); - value = maybe_value->ToObjectChecked(); + value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1); CHECK(value->IsHeapNumber()); CHECK(value->IsNumber()); CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1), value->Number()); - maybe_value = heap->NumberFromUint32(static_cast<uint32_t>(1) << 31); - value = maybe_value->ToObjectChecked(); + value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31); CHECK(value->IsHeapNumber()); CHECK(value->IsNumber()); CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31), value->Number()); // nan oddball checks - CHECK(heap->nan_value()->IsNumber()); - CHECK(std::isnan(heap->nan_value()->Number())); + CHECK(factory->nan_value()->IsNumber()); + CHECK(std::isnan(factory->nan_value()->Number())); - Handle<String> s = factory->NewStringFromAscii(CStrVector("fisk hest ")); + Handle<String> s = factory->NewStringFromStaticAscii("fisk hest "); CHECK(s->IsString()); CHECK_EQ(10, s->length()); @@ -248,12 +235,6 @@ TEST(Tagging) { int request = 24; CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request))); CHECK(Smi::FromInt(42)->IsSmi()); - CHECK(Failure::RetryAfterGC(NEW_SPACE)->IsFailure()); - CHECK_EQ(NEW_SPACE, - Failure::RetryAfterGC(NEW_SPACE)->allocation_space()); - CHECK_EQ(OLD_POINTER_SPACE, - Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space()); - CHECK(Failure::Exception()->IsFailure()); CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi()); CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi()); } @@ -280,19 +261,23 @@ TEST(GarbageCollection) { { HandleScope inner_scope(isolate); // Allocate a function and keep it in global object's property. - Handle<JSFunction> function = - factory->NewFunction(name, factory->undefined_value()); + Handle<JSFunction> function = factory->NewFunctionWithPrototype( + name, factory->undefined_value()); Handle<Map> initial_map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); function->set_initial_map(*initial_map); - JSReceiver::SetProperty(global, name, function, NONE, SLOPPY); + JSReceiver::SetProperty(global, name, function, NONE, SLOPPY).Check(); // Allocate an object. Unrooted after leaving the scope. Handle<JSObject> obj = factory->NewJSObject(function); - JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, SLOPPY); - JSReceiver::SetProperty(obj, prop_namex, twenty_four, NONE, SLOPPY); - - CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name)); - CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex)); + JSReceiver::SetProperty( + obj, prop_name, twenty_three, NONE, SLOPPY).Check(); + JSReceiver::SetProperty( + obj, prop_namex, twenty_four, NONE, SLOPPY).Check(); + + CHECK_EQ(Smi::FromInt(23), + *Object::GetProperty(obj, prop_name).ToHandleChecked()); + CHECK_EQ(Smi::FromInt(24), + *Object::GetProperty(obj, prop_namex).ToHandleChecked()); } heap->CollectGarbage(NEW_SPACE); @@ -300,35 +285,36 @@ TEST(GarbageCollection) { // Function should be alive. CHECK(JSReceiver::HasLocalProperty(global, name)); // Check function is retained. 
- Object* func_value = CcTest::i_isolate()->context()->global_object()-> - GetProperty(*name)->ToObjectChecked(); + Handle<Object> func_value = + Object::GetProperty(global, name).ToHandleChecked(); CHECK(func_value->IsJSFunction()); - Handle<JSFunction> function(JSFunction::cast(func_value)); + Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); { HandleScope inner_scope(isolate); // Allocate another object, make it reachable from global. Handle<JSObject> obj = factory->NewJSObject(function); - JSReceiver::SetProperty(global, obj_name, obj, NONE, SLOPPY); - JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, SLOPPY); + JSReceiver::SetProperty(global, obj_name, obj, NONE, SLOPPY).Check(); + JSReceiver::SetProperty( + obj, prop_name, twenty_three, NONE, SLOPPY).Check(); } // After gc, it should survive. heap->CollectGarbage(NEW_SPACE); CHECK(JSReceiver::HasLocalProperty(global, obj_name)); - CHECK(CcTest::i_isolate()->context()->global_object()-> - GetProperty(*obj_name)->ToObjectChecked()->IsJSObject()); - Object* obj = CcTest::i_isolate()->context()->global_object()-> - GetProperty(*obj_name)->ToObjectChecked(); - JSObject* js_obj = JSObject::cast(obj); - CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name)); + Handle<Object> obj = + Object::GetProperty(global, obj_name).ToHandleChecked(); + CHECK(obj->IsJSObject()); + CHECK_EQ(Smi::FromInt(23), + *Object::GetProperty(obj, prop_name).ToHandleChecked()); } static void VerifyStringAllocation(Isolate* isolate, const char* string) { HandleScope scope(isolate); - Handle<String> s = isolate->factory()->NewStringFromUtf8(CStrVector(string)); + Handle<String> s = isolate->factory()->NewStringFromUtf8( + CStrVector(string)).ToHandleChecked(); CHECK_EQ(StrLength(string), s->length()); for (int index = 0; index < s->length(); index++) { CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index)); @@ -355,7 +341,7 @@ TEST(LocalHandles) { v8::HandleScope scope(CcTest::isolate()); const char* name = "Kasper the spunky"; - Handle<String> string = factory->NewStringFromAscii(CStrVector(name)); + Handle<String> string = factory->NewStringFromAsciiChecked(name); CHECK_EQ(StrLength(name), string->length()); } @@ -375,7 +361,7 @@ TEST(GlobalHandles) { { HandleScope scope(isolate); - Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk")); + Handle<Object> i = factory->NewStringFromStaticAscii("fisk"); Handle<Object> u = factory->NewNumber(1.12344); h1 = global_handles->Create(*i); @@ -430,7 +416,7 @@ TEST(WeakGlobalHandlesScavenge) { { HandleScope scope(isolate); - Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk")); + Handle<Object> i = factory->NewStringFromStaticAscii("fisk"); Handle<Object> u = factory->NewNumber(1.12344); h1 = global_handles->Create(*i); @@ -472,7 +458,7 @@ TEST(WeakGlobalHandlesMark) { { HandleScope scope(isolate); - Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk")); + Handle<Object> i = factory->NewStringFromStaticAscii("fisk"); Handle<Object> u = factory->NewNumber(1.12344); h1 = global_handles->Create(*i); @@ -518,7 +504,7 @@ TEST(DeleteWeakGlobalHandle) { { HandleScope scope(isolate); - Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk")); + Handle<Object> i = factory->NewStringFromStaticAscii("fisk"); h = global_handles->Create(*i); } @@ -604,17 +590,20 @@ static const char* not_so_random_string_table[] = { static void CheckInternalizedStrings(const char** strings) { + Isolate* isolate = CcTest::i_isolate(); + Factory* factory = 
isolate->factory(); for (const char* string = *strings; *strings != 0; string = *strings++) { - Object* a; - MaybeObject* maybe_a = CcTest::heap()->InternalizeUtf8String(string); + HandleScope scope(isolate); + Handle<String> a = + isolate->factory()->InternalizeUtf8String(CStrVector(string)); // InternalizeUtf8String may return a failure if a GC is needed. - if (!maybe_a->ToObject(&a)) continue; CHECK(a->IsInternalizedString()); - Object* b; - MaybeObject* maybe_b = CcTest::heap()->InternalizeUtf8String(string); - if (!maybe_b->ToObject(&b)) continue; - CHECK_EQ(b, a); - CHECK(String::cast(b)->IsUtf8EqualTo(CStrVector(string))); + Handle<String> b = factory->InternalizeUtf8String(string); + CHECK_EQ(*b, *a); + CHECK(b->IsUtf8EqualTo(CStrVector(string))); + b = isolate->factory()->InternalizeUtf8String(CStrVector(string)); + CHECK_EQ(*b, *a); + CHECK(b->IsUtf8EqualTo(CStrVector(string))); } } @@ -622,6 +611,7 @@ static void CheckInternalizedStrings(const char** strings) { TEST(StringTable) { CcTest::InitializeVM(); + v8::HandleScope sc(CcTest::isolate()); CheckInternalizedStrings(not_so_random_string_table); CheckInternalizedStrings(not_so_random_string_table); } @@ -634,8 +624,8 @@ TEST(FunctionAllocation) { v8::HandleScope sc(CcTest::isolate()); Handle<String> name = factory->InternalizeUtf8String("theFunction"); - Handle<JSFunction> function = - factory->NewFunction(name, factory->undefined_value()); + Handle<JSFunction> function = factory->NewFunctionWithPrototype( + name, factory->undefined_value()); Handle<Map> initial_map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); function->set_initial_map(*initial_map); @@ -645,11 +635,14 @@ TEST(FunctionAllocation) { Handle<String> prop_name = factory->InternalizeUtf8String("theSlot"); Handle<JSObject> obj = factory->NewJSObject(function); - JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, SLOPPY); - CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name)); + JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, SLOPPY).Check(); + CHECK_EQ(Smi::FromInt(23), + *Object::GetProperty(obj, prop_name).ToHandleChecked()); // Check that we can add properties to function objects. 
- JSReceiver::SetProperty(function, prop_name, twenty_four, NONE, SLOPPY); - CHECK_EQ(Smi::FromInt(24), function->GetProperty(*prop_name)); + JSReceiver::SetProperty( + function, prop_name, twenty_four, NONE, SLOPPY).Check(); + CHECK_EQ(Smi::FromInt(24), + *Object::GetProperty(function, prop_name).ToHandleChecked()); } @@ -659,11 +652,10 @@ TEST(ObjectProperties) { Factory* factory = isolate->factory(); v8::HandleScope sc(CcTest::isolate()); - String* object_string = String::cast(CcTest::heap()->Object_string()); - Object* raw_object = CcTest::i_isolate()->context()->global_object()-> - GetProperty(object_string)->ToObjectChecked(); - JSFunction* object_function = JSFunction::cast(raw_object); - Handle<JSFunction> constructor(object_function); + Handle<String> object_string(String::cast(CcTest::heap()->Object_string())); + Handle<Object> object = Object::GetProperty( + CcTest::i_isolate()->global_object(), object_string).ToHandleChecked(); + Handle<JSFunction> constructor = Handle<JSFunction>::cast(object); Handle<JSObject> obj = factory->NewJSObject(constructor); Handle<String> first = factory->InternalizeUtf8String("first"); Handle<String> second = factory->InternalizeUtf8String("second"); @@ -675,51 +667,51 @@ TEST(ObjectProperties) { CHECK(!JSReceiver::HasLocalProperty(obj, first)); // add first - JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY); + JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY).Check(); CHECK(JSReceiver::HasLocalProperty(obj, first)); // delete first - JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION); + JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check(); CHECK(!JSReceiver::HasLocalProperty(obj, first)); // add first and then second - JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY); - JSReceiver::SetProperty(obj, second, two, NONE, SLOPPY); + JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY).Check(); + JSReceiver::SetProperty(obj, second, two, NONE, SLOPPY).Check(); CHECK(JSReceiver::HasLocalProperty(obj, first)); CHECK(JSReceiver::HasLocalProperty(obj, second)); // delete first and then second - JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION); + JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check(); CHECK(JSReceiver::HasLocalProperty(obj, second)); - JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION); + JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION).Check(); CHECK(!JSReceiver::HasLocalProperty(obj, first)); CHECK(!JSReceiver::HasLocalProperty(obj, second)); // add first and then second - JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY); - JSReceiver::SetProperty(obj, second, two, NONE, SLOPPY); + JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY).Check(); + JSReceiver::SetProperty(obj, second, two, NONE, SLOPPY).Check(); CHECK(JSReceiver::HasLocalProperty(obj, first)); CHECK(JSReceiver::HasLocalProperty(obj, second)); // delete second and then first - JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION); + JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION).Check(); CHECK(JSReceiver::HasLocalProperty(obj, first)); - JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION); + JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check(); CHECK(!JSReceiver::HasLocalProperty(obj, first)); CHECK(!JSReceiver::HasLocalProperty(obj, second)); // check string and internalized string match const char* string1 = "fisk"; - Handle<String> s1 = 
factory->NewStringFromAscii(CStrVector(string1)); - JSReceiver::SetProperty(obj, s1, one, NONE, SLOPPY); + Handle<String> s1 = factory->NewStringFromAsciiChecked(string1); + JSReceiver::SetProperty(obj, s1, one, NONE, SLOPPY).Check(); Handle<String> s1_string = factory->InternalizeUtf8String(string1); CHECK(JSReceiver::HasLocalProperty(obj, s1_string)); // check internalized string and string match const char* string2 = "fugl"; Handle<String> s2_string = factory->InternalizeUtf8String(string2); - JSReceiver::SetProperty(obj, s2_string, one, NONE, SLOPPY); - Handle<String> s2 = factory->NewStringFromAscii(CStrVector(string2)); + JSReceiver::SetProperty(obj, s2_string, one, NONE, SLOPPY).Check(); + Handle<String> s2 = factory->NewStringFromAsciiChecked(string2); CHECK(JSReceiver::HasLocalProperty(obj, s2)); } @@ -731,8 +723,8 @@ TEST(JSObjectMaps) { v8::HandleScope sc(CcTest::isolate()); Handle<String> name = factory->InternalizeUtf8String("theFunction"); - Handle<JSFunction> function = - factory->NewFunction(name, factory->undefined_value()); + Handle<JSFunction> function = factory->NewFunctionWithPrototype( + name, factory->undefined_value()); Handle<Map> initial_map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); function->set_initial_map(*initial_map); @@ -742,8 +734,9 @@ TEST(JSObjectMaps) { // Set a propery Handle<Smi> twenty_three(Smi::FromInt(23), isolate); - JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, SLOPPY); - CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name)); + JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, SLOPPY).Check(); + CHECK_EQ(Smi::FromInt(23), + *Object::GetProperty(obj, prop_name).ToHandleChecked()); // Check the map has changed CHECK(*initial_map != obj->map()); @@ -757,32 +750,33 @@ TEST(JSArray) { v8::HandleScope sc(CcTest::isolate()); Handle<String> name = factory->InternalizeUtf8String("Array"); - Object* raw_object = CcTest::i_isolate()->context()->global_object()-> - GetProperty(*name)->ToObjectChecked(); - Handle<JSFunction> function = Handle<JSFunction>( - JSFunction::cast(raw_object)); + Handle<Object> fun_obj = Object::GetProperty( + CcTest::i_isolate()->global_object(), name).ToHandleChecked(); + Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj); // Allocate the object. + Handle<Object> element; Handle<JSObject> object = factory->NewJSObject(function); Handle<JSArray> array = Handle<JSArray>::cast(object); // We just initialized the VM, no heap allocation failure yet. JSArray::Initialize(array, 0); // Set array length to 0. - *JSArray::SetElementsLength(array, handle(Smi::FromInt(0), isolate)); + JSArray::SetElementsLength(array, handle(Smi::FromInt(0), isolate)).Check(); CHECK_EQ(Smi::FromInt(0), array->length()); // Must be in fast mode. CHECK(array->HasFastSmiOrObjectElements()); // array[length] = name. - JSReceiver::SetElement(array, 0, name, NONE, SLOPPY); + JSReceiver::SetElement(array, 0, name, NONE, SLOPPY).Check(); CHECK_EQ(Smi::FromInt(1), array->length()); - CHECK_EQ(*i::Object::GetElement(isolate, array, 0), *name); + element = i::Object::GetElement(isolate, array, 0).ToHandleChecked(); + CHECK_EQ(*element, *name); // Set array length with larger than smi value. 
Handle<Object> length = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1); - *JSArray::SetElementsLength(array, length); + JSArray::SetElementsLength(array, length).Check(); uint32_t int_length = 0; CHECK(length->ToArrayIndex(&int_length)); @@ -790,12 +784,14 @@ TEST(JSArray) { CHECK(array->HasDictionaryElements()); // Must be in slow mode. // array[length] = name. - JSReceiver::SetElement(array, int_length, name, NONE, SLOPPY); + JSReceiver::SetElement(array, int_length, name, NONE, SLOPPY).Check(); uint32_t new_int_length = 0; CHECK(array->length()->ToArrayIndex(&new_int_length)); CHECK_EQ(static_cast<double>(int_length), new_int_length - 1); - CHECK_EQ(*i::Object::GetElement(isolate, array, int_length), *name); - CHECK_EQ(*i::Object::GetElement(isolate, array, 0), *name); + element = Object::GetElement(isolate, array, int_length).ToHandleChecked(); + CHECK_EQ(*element, *name); + element = Object::GetElement(isolate, array, 0).ToHandleChecked(); + CHECK_EQ(*element, *name); } @@ -805,11 +801,10 @@ TEST(JSObjectCopy) { Factory* factory = isolate->factory(); v8::HandleScope sc(CcTest::isolate()); - String* object_string = String::cast(CcTest::heap()->Object_string()); - Object* raw_object = CcTest::i_isolate()->context()->global_object()-> - GetProperty(object_string)->ToObjectChecked(); - JSFunction* object_function = JSFunction::cast(raw_object); - Handle<JSFunction> constructor(object_function); + Handle<String> object_string(String::cast(CcTest::heap()->Object_string())); + Handle<Object> object = Object::GetProperty( + CcTest::i_isolate()->global_object(), object_string).ToHandleChecked(); + Handle<JSFunction> constructor = Handle<JSFunction>::cast(object); Handle<JSObject> obj = factory->NewJSObject(constructor); Handle<String> first = factory->InternalizeUtf8String("first"); Handle<String> second = factory->InternalizeUtf8String("second"); @@ -817,38 +812,51 @@ TEST(JSObjectCopy) { Handle<Smi> one(Smi::FromInt(1), isolate); Handle<Smi> two(Smi::FromInt(2), isolate); - JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY); - JSReceiver::SetProperty(obj, second, two, NONE, SLOPPY); + JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY).Check(); + JSReceiver::SetProperty(obj, second, two, NONE, SLOPPY).Check(); - JSReceiver::SetElement(obj, 0, first, NONE, SLOPPY); - JSReceiver::SetElement(obj, 1, second, NONE, SLOPPY); + JSReceiver::SetElement(obj, 0, first, NONE, SLOPPY).Check(); + JSReceiver::SetElement(obj, 1, second, NONE, SLOPPY).Check(); // Make the clone. 
- Handle<JSObject> clone = JSObject::Copy(obj); + Handle<Object> value1, value2; + Handle<JSObject> clone = factory->CopyJSObject(obj); CHECK(!clone.is_identical_to(obj)); - CHECK_EQ(*i::Object::GetElement(isolate, obj, 0), - *i::Object::GetElement(isolate, clone, 0)); - CHECK_EQ(*i::Object::GetElement(isolate, obj, 1), - *i::Object::GetElement(isolate, clone, 1)); + value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked(); + value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked(); + CHECK_EQ(*value1, *value2); + value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked(); + value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked(); + CHECK_EQ(*value1, *value2); - CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*first)); - CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*second)); + value1 = Object::GetProperty(obj, first).ToHandleChecked(); + value2 = Object::GetProperty(clone, first).ToHandleChecked(); + CHECK_EQ(*value1, *value2); + value1 = Object::GetProperty(obj, second).ToHandleChecked(); + value2 = Object::GetProperty(clone, second).ToHandleChecked(); + CHECK_EQ(*value1, *value2); // Flip the values. - JSReceiver::SetProperty(clone, first, two, NONE, SLOPPY); - JSReceiver::SetProperty(clone, second, one, NONE, SLOPPY); - - JSReceiver::SetElement(clone, 0, second, NONE, SLOPPY); - JSReceiver::SetElement(clone, 1, first, NONE, SLOPPY); - - CHECK_EQ(*i::Object::GetElement(isolate, obj, 1), - *i::Object::GetElement(isolate, clone, 0)); - CHECK_EQ(*i::Object::GetElement(isolate, obj, 0), - *i::Object::GetElement(isolate, clone, 1)); - - CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*first)); - CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*second)); + JSReceiver::SetProperty(clone, first, two, NONE, SLOPPY).Check(); + JSReceiver::SetProperty(clone, second, one, NONE, SLOPPY).Check(); + + JSReceiver::SetElement(clone, 0, second, NONE, SLOPPY).Check(); + JSReceiver::SetElement(clone, 1, first, NONE, SLOPPY).Check(); + + value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked(); + value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked(); + CHECK_EQ(*value1, *value2); + value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked(); + value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked(); + CHECK_EQ(*value1, *value2); + + value1 = Object::GetProperty(obj, second).ToHandleChecked(); + value2 = Object::GetProperty(clone, first).ToHandleChecked(); + CHECK_EQ(*value1, *value2); + value1 = Object::GetProperty(obj, first).ToHandleChecked(); + value2 = Object::GetProperty(clone, second).ToHandleChecked(); + CHECK_EQ(*value1, *value2); } @@ -877,12 +885,12 @@ TEST(StringAllocation) { Handle<String> ascii_sym = factory->InternalizeOneByteString(OneByteVector(ascii, length)); CHECK_EQ(length, ascii_sym->length()); - Handle<String> non_ascii_str = - factory->NewStringFromUtf8(Vector<const char>(non_ascii, 3 * length)); + Handle<String> non_ascii_str = factory->NewStringFromUtf8( + Vector<const char>(non_ascii, 3 * length)).ToHandleChecked(); non_ascii_str->Hash(); CHECK_EQ(length, non_ascii_str->length()); - Handle<String> ascii_str = - factory->NewStringFromUtf8(Vector<const char>(ascii, length)); + Handle<String> ascii_str = factory->NewStringFromUtf8( + Vector<const char>(ascii, length)).ToHandleChecked(); ascii_str->Hash(); CHECK_EQ(length, ascii_str->length()); DeleteArray(non_ascii); @@ -926,17 +934,16 @@ TEST(Iteration) { // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE objs[next_objs_index++] = - 
factory->NewStringFromAscii(CStrVector("abcdefghij")); + factory->NewStringFromStaticAscii("abcdefghij"); objs[next_objs_index++] = - factory->NewStringFromAscii(CStrVector("abcdefghij"), TENURED); + factory->NewStringFromStaticAscii("abcdefghij", TENURED); // Allocate a large string (for large object space). int large_size = Page::kMaxRegularHeapObjectSize + 1; char* str = new char[large_size]; for (int i = 0; i < large_size - 1; ++i) str[i] = 'a'; str[large_size - 1] = '\0'; - objs[next_objs_index++] = - factory->NewStringFromAscii(CStrVector(str), TENURED); + objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED); delete[] str; // Add a Map object to look for. @@ -972,8 +979,7 @@ TEST(Regression39128) { // Test case for crbug.com/39128. CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); - Factory* factory = isolate->factory(); - Heap* heap = isolate->heap(); + TestHeap* heap = CcTest::test_heap(); // Increase the chance of 'bump-the-pointer' allocation in old space. heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); @@ -988,9 +994,8 @@ TEST(Regression39128) { Handle<JSFunction> object_ctor( CcTest::i_isolate()->native_context()->object_function()); CHECK(object_ctor->has_initial_map()); - Handle<Map> object_map(object_ctor->initial_map()); // Create a map with single inobject property. - Handle<Map> my_map = factory->CopyMap(object_map, 1); + Handle<Map> my_map = Map::Create(object_ctor, 1); int n_properties = my_map->inobject_properties(); CHECK_GT(n_properties, 0); @@ -1007,8 +1012,7 @@ TEST(Regression39128) { Address* limit_addr = new_space->allocation_limit_address(); while ((*limit_addr - *top_addr) > allocation_amount) { CHECK(!heap->always_allocate()); - Object* array = heap->AllocateFixedArray(allocation_len)->ToObjectChecked(); - CHECK(!array->IsFailure()); + Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked(); CHECK(new_space->Contains(array)); } @@ -1018,11 +1022,10 @@ TEST(Regression39128) { CHECK(fixed_array_len < FixedArray::kMaxLength); CHECK(!heap->always_allocate()); - Object* array = heap->AllocateFixedArray(fixed_array_len)->ToObjectChecked(); - CHECK(!array->IsFailure()); + Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked(); CHECK(new_space->Contains(array)); - Object* object = heap->AllocateJSObjectFromMap(*my_map)->ToObjectChecked(); + Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked(); CHECK(new_space->Contains(object)); JSObject* jsobject = JSObject::cast(object); CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length()); @@ -1036,7 +1039,7 @@ TEST(Regression39128) { // in old pointer space. Address old_pointer_space_top = heap->old_pointer_space()->top(); AlwaysAllocateScope aa_scope(isolate); - Object* clone_obj = heap->CopyJSObject(jsobject)->ToObjectChecked(); + Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked(); JSObject* clone = JSObject::cast(clone_obj); if (clone->address() != old_pointer_space_top) { // Alas, got allocated from free list, we cannot do checks. @@ -1069,10 +1072,10 @@ TEST(TestCodeFlushing) { } // Check function is compiled. 
- Object* func_value = CcTest::i_isolate()->context()->global_object()-> - GetProperty(*foo_name)->ToObjectChecked(); + Handle<Object> func_value = Object::GetProperty( + CcTest::i_isolate()->global_object(), foo_name).ToHandleChecked(); CHECK(func_value->IsJSFunction()); - Handle<JSFunction> function(JSFunction::cast(func_value)); + Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); CHECK(function->shared()->is_compiled()); // The code will survive at least two GCs. @@ -1102,7 +1105,7 @@ TEST(TestCodeFlushingPreAged) { i::FLAG_allow_natives_syntax = true; i::FLAG_optimize_for_size = true; CcTest::InitializeVM(); - Isolate* isolate = Isolate::Current(); + Isolate* isolate = CcTest::i_isolate(); Factory* factory = isolate->factory(); v8::HandleScope scope(CcTest::isolate()); const char* source = "function foo() {" @@ -1119,10 +1122,10 @@ TEST(TestCodeFlushingPreAged) { } // Check function is compiled. - Object* func_value = Isolate::Current()->context()->global_object()-> - GetProperty(*foo_name)->ToObjectChecked(); + Handle<Object> func_value = + Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); CHECK(func_value->IsJSFunction()); - Handle<JSFunction> function(JSFunction::cast(func_value)); + Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); CHECK(function->shared()->is_compiled()); // The code has been run so will survive at least one GC. @@ -1184,10 +1187,10 @@ TEST(TestCodeFlushingIncremental) { } // Check function is compiled. - Object* func_value = CcTest::i_isolate()->context()->global_object()-> - GetProperty(*foo_name)->ToObjectChecked(); + Handle<Object> func_value = + Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); CHECK(func_value->IsJSFunction()); - Handle<JSFunction> function(JSFunction::cast(func_value)); + Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); CHECK(function->shared()->is_compiled()); // The code will survive at least two GCs. @@ -1261,15 +1264,15 @@ TEST(TestCodeFlushingIncrementalScavenge) { } // Check functions are compiled. - Object* func_value = CcTest::i_isolate()->context()->global_object()-> - GetProperty(*foo_name)->ToObjectChecked(); + Handle<Object> func_value = + Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); CHECK(func_value->IsJSFunction()); - Handle<JSFunction> function(JSFunction::cast(func_value)); + Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); CHECK(function->shared()->is_compiled()); - Object* func_value2 = CcTest::i_isolate()->context()->global_object()-> - GetProperty(*bar_name)->ToObjectChecked(); + Handle<Object> func_value2 = + Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked(); CHECK(func_value2->IsJSFunction()); - Handle<JSFunction> function2(JSFunction::cast(func_value2)); + Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2); CHECK(function2->shared()->is_compiled()); // Clear references to functions so that one of them can die. @@ -1323,10 +1326,10 @@ TEST(TestCodeFlushingIncrementalAbort) { } // Check function is compiled. 
- Object* func_value = CcTest::i_isolate()->context()->global_object()-> - GetProperty(*foo_name)->ToObjectChecked(); + Handle<Object> func_value = + Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); CHECK(func_value->IsJSFunction()); - Handle<JSFunction> function(JSFunction::cast(func_value)); + Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); CHECK(function->shared()->is_compiled()); // The code will survive at least two GCs. @@ -1344,7 +1347,6 @@ TEST(TestCodeFlushingIncrementalAbort) { // code flushing candidate. SimulateIncrementalMarking(); -#ifdef ENABLE_DEBUGGER_SUPPORT // Enable the debugger and add a breakpoint while incremental marking // is running so that incremental marking aborts and code flushing is // disabled. @@ -1352,7 +1354,6 @@ TEST(TestCodeFlushingIncrementalAbort) { Handle<Object> breakpoint_object(Smi::FromInt(0), isolate); isolate->debug()->SetBreakPoint(function, breakpoint_object, &position); isolate->debug()->ClearAllBreakPoints(); -#endif // ENABLE_DEBUGGER_SUPPORT // Force optimization now that code flushing is disabled. { v8::HandleScope scope(CcTest::isolate()); @@ -1603,16 +1604,20 @@ TEST(TestSizeOfObjects) { CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags); CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags); CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags); - CHECK(CcTest::heap()->old_pointer_space()->IsLazySweepingComplete()); + MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); + if (collector->IsConcurrentSweepingInProgress()) { + collector->WaitUntilSweepingCompleted(); + } int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects()); { // Allocate objects on several different old-space pages so that - // lazy sweeping kicks in for subsequent GC runs. + // concurrent sweeper threads will be busy sweeping the old space on + // subsequent GC runs. AlwaysAllocateScope always_allocate(CcTest::i_isolate()); int filler_size = static_cast<int>(FixedArray::SizeFor(8192)); for (int i = 1; i <= 100; i++) { - CcTest::heap()->AllocateFixedArray(8192, TENURED)->ToObjectChecked(); + CcTest::test_heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked(); CHECK_EQ(initial_size + i * filler_size, static_cast<int>(CcTest::heap()->SizeOfObjects())); } @@ -1626,11 +1631,11 @@ TEST(TestSizeOfObjects) { CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects())); - // Advancing the sweeper step-wise should not change the heap size. - while (!CcTest::heap()->old_pointer_space()->IsLazySweepingComplete()) { - CcTest::heap()->old_pointer_space()->AdvanceSweeper(KB); - CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects())); + // Waiting for sweeper threads should not change heap size. 
+ if (collector->IsConcurrentSweepingInProgress()) { + collector->WaitUntilSweepingCompleted(); } + CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects())); } @@ -2012,6 +2017,7 @@ TEST(InstanceOfStubWriteBarrier) { TEST(PrototypeTransitionClearing) { + if (FLAG_never_compact) return; CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); Factory* factory = isolate->factory(); @@ -2067,10 +2073,6 @@ TEST(PrototypeTransitionClearing) { CHECK(!space->LastPage()->Contains( map->GetPrototypeTransitions()->address())); CHECK(space->LastPage()->Contains(prototype->address())); - JSObject::SetPrototype(baseObject, prototype, false); - CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap()); - CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags); - CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap()); } @@ -2206,7 +2208,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) { TEST(OptimizedPretenuringAllocationFolding) { i::FLAG_allow_natives_syntax = true; - i::FLAG_max_new_space_size = 2048; + i::FLAG_max_new_space_size = 2; i::FLAG_allocation_site_pretenuring = false; CcTest::InitializeVM(); if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; @@ -2249,7 +2251,7 @@ TEST(OptimizedPretenuringAllocationFolding) { TEST(OptimizedPretenuringAllocationFoldingBlocks) { i::FLAG_allow_natives_syntax = true; - i::FLAG_max_new_space_size = 2048; + i::FLAG_max_new_space_size = 2; i::FLAG_allocation_site_pretenuring = false; CcTest::InitializeVM(); if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; @@ -2292,7 +2294,7 @@ TEST(OptimizedPretenuringAllocationFoldingBlocks) { TEST(OptimizedPretenuringObjectArrayLiterals) { i::FLAG_allow_natives_syntax = true; - i::FLAG_max_new_space_size = 2048; + i::FLAG_max_new_space_size = 2; CcTest::InitializeVM(); if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; if (i::FLAG_gc_global || i::FLAG_stress_compaction) return; @@ -2321,7 +2323,7 @@ TEST(OptimizedPretenuringObjectArrayLiterals) { TEST(OptimizedPretenuringMixedInObjectProperties) { i::FLAG_allow_natives_syntax = true; - i::FLAG_max_new_space_size = 2048; + i::FLAG_max_new_space_size = 2; CcTest::InitializeVM(); if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; if (i::FLAG_gc_global || i::FLAG_stress_compaction) return; @@ -2356,7 +2358,7 @@ TEST(OptimizedPretenuringMixedInObjectProperties) { TEST(OptimizedPretenuringDoubleArrayProperties) { i::FLAG_allow_natives_syntax = true; - i::FLAG_max_new_space_size = 2048; + i::FLAG_max_new_space_size = 2; CcTest::InitializeVM(); if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; if (i::FLAG_gc_global || i::FLAG_stress_compaction) return; @@ -2385,7 +2387,7 @@ TEST(OptimizedPretenuringDoubleArrayProperties) { TEST(OptimizedPretenuringdoubleArrayLiterals) { i::FLAG_allow_natives_syntax = true; - i::FLAG_max_new_space_size = 2048; + i::FLAG_max_new_space_size = 2; CcTest::InitializeVM(); if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; if (i::FLAG_gc_global || i::FLAG_stress_compaction) return; @@ -2414,7 +2416,7 @@ TEST(OptimizedPretenuringdoubleArrayLiterals) { TEST(OptimizedPretenuringNestedMixedArrayLiterals) { i::FLAG_allow_natives_syntax = true; - i::FLAG_max_new_space_size = 2048; + i::FLAG_max_new_space_size = 2; CcTest::InitializeVM(); if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; if (i::FLAG_gc_global || i::FLAG_stress_compaction) return; @@ -2452,7 +2454,7 @@ 
TEST(OptimizedPretenuringNestedMixedArrayLiterals) { TEST(OptimizedPretenuringNestedObjectLiterals) { i::FLAG_allow_natives_syntax = true; - i::FLAG_max_new_space_size = 2048; + i::FLAG_max_new_space_size = 2; CcTest::InitializeVM(); if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; if (i::FLAG_gc_global || i::FLAG_stress_compaction) return; @@ -2490,7 +2492,7 @@ TEST(OptimizedPretenuringNestedObjectLiterals) { TEST(OptimizedPretenuringNestedDoubleLiterals) { i::FLAG_allow_natives_syntax = true; - i::FLAG_max_new_space_size = 2048; + i::FLAG_max_new_space_size = 2; CcTest::InitializeVM(); if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; if (i::FLAG_gc_global || i::FLAG_stress_compaction) return; @@ -2536,7 +2538,7 @@ TEST(OptimizedPretenuringConstructorCalls) { return; } i::FLAG_allow_natives_syntax = true; - i::FLAG_max_new_space_size = 2048; + i::FLAG_max_new_space_size = 2; CcTest::InitializeVM(); if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; if (i::FLAG_gc_global || i::FLAG_stress_compaction) return; @@ -2635,14 +2637,15 @@ TEST(Regress1465) { v8::HandleScope scope(CcTest::isolate()); static const int transitions_count = 256; + CompileRun("function F() {}"); { AlwaysAllocateScope always_allocate(CcTest::i_isolate()); for (int i = 0; i < transitions_count; i++) { EmbeddedVector<char, 64> buffer; - OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i); + OS::SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i); CompileRun(buffer.start()); } - CompileRun("var root = new Object;"); + CompileRun("var root = new F;"); } Handle<JSObject> root = @@ -2666,6 +2669,149 @@ TEST(Regress1465) { } +#ifdef DEBUG +static void AddTransitions(int transitions_count) { + AlwaysAllocateScope always_allocate(CcTest::i_isolate()); + for (int i = 0; i < transitions_count; i++) { + EmbeddedVector<char, 64> buffer; + OS::SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i); + CompileRun(buffer.start()); + } +} + + +static Handle<JSObject> GetByName(const char* name) { + return v8::Utils::OpenHandle( + *v8::Handle<v8::Object>::Cast( + CcTest::global()->Get(v8_str(name)))); +} + + +static void AddPropertyTo( + int gc_count, Handle<JSObject> object, const char* property_name) { + Isolate* isolate = CcTest::i_isolate(); + Factory* factory = isolate->factory(); + Handle<String> prop_name = factory->InternalizeUtf8String(property_name); + Handle<Smi> twenty_three(Smi::FromInt(23), isolate); + i::FLAG_gc_interval = gc_count; + i::FLAG_gc_global = true; + CcTest::heap()->set_allocation_timeout(gc_count); + JSReceiver::SetProperty( + object, prop_name, twenty_three, NONE, SLOPPY).Check(); +} + + +TEST(TransitionArrayShrinksDuringAllocToZero) { + i::FLAG_stress_compaction = false; + i::FLAG_allow_natives_syntax = true; + CcTest::InitializeVM(); + v8::HandleScope scope(CcTest::isolate()); + static const int transitions_count = 10; + CompileRun("function F() { }"); + AddTransitions(transitions_count); + CompileRun("var root = new F;"); + Handle<JSObject> root = GetByName("root"); + + // Count number of live transitions before marking. + int transitions_before = CountMapTransitions(root->map()); + CHECK_EQ(transitions_count, transitions_before); + + // Get rid of o + CompileRun("o = new F;" + "root = new F"); + root = GetByName("root"); + AddPropertyTo(2, root, "funny"); + + // Count number of live transitions after marking. Note that one transition + // is left, because 'o' still holds an instance of one transition target. 
+ int transitions_after = CountMapTransitions( + Map::cast(root->map()->GetBackPointer())); + CHECK_EQ(1, transitions_after); +} + + +TEST(TransitionArrayShrinksDuringAllocToOne) { + i::FLAG_stress_compaction = false; + i::FLAG_allow_natives_syntax = true; + CcTest::InitializeVM(); + v8::HandleScope scope(CcTest::isolate()); + static const int transitions_count = 10; + CompileRun("function F() {}"); + AddTransitions(transitions_count); + CompileRun("var root = new F;"); + Handle<JSObject> root = GetByName("root"); + + // Count number of live transitions before marking. + int transitions_before = CountMapTransitions(root->map()); + CHECK_EQ(transitions_count, transitions_before); + + root = GetByName("root"); + AddPropertyTo(2, root, "funny"); + + // Count number of live transitions after marking. Note that one transition + // is left, because 'o' still holds an instance of one transition target. + int transitions_after = CountMapTransitions( + Map::cast(root->map()->GetBackPointer())); + CHECK_EQ(2, transitions_after); +} + + +TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) { + i::FLAG_stress_compaction = false; + i::FLAG_allow_natives_syntax = true; + CcTest::InitializeVM(); + v8::HandleScope scope(CcTest::isolate()); + static const int transitions_count = 10; + CompileRun("function F() {}"); + AddTransitions(transitions_count); + CompileRun("var root = new F;"); + Handle<JSObject> root = GetByName("root"); + + // Count number of live transitions before marking. + int transitions_before = CountMapTransitions(root->map()); + CHECK_EQ(transitions_count, transitions_before); + + root = GetByName("root"); + AddPropertyTo(0, root, "prop9"); + + // Count number of live transitions after marking. Note that one transition + // is left, because 'o' still holds an instance of one transition target. + int transitions_after = CountMapTransitions( + Map::cast(root->map()->GetBackPointer())); + CHECK_EQ(1, transitions_after); +} + + +TEST(TransitionArraySimpleToFull) { + i::FLAG_stress_compaction = false; + i::FLAG_allow_natives_syntax = true; + CcTest::InitializeVM(); + v8::HandleScope scope(CcTest::isolate()); + static const int transitions_count = 1; + CompileRun("function F() {}"); + AddTransitions(transitions_count); + CompileRun("var root = new F;"); + Handle<JSObject> root = GetByName("root"); + + // Count number of live transitions before marking. + int transitions_before = CountMapTransitions(root->map()); + CHECK_EQ(transitions_count, transitions_before); + + CompileRun("o = new F;" + "root = new F"); + root = GetByName("root"); + ASSERT(root->map()->transitions()->IsSimpleTransition()); + AddPropertyTo(2, root, "happy"); + + // Count number of live transitions after marking. Note that one transition + // is left, because 'o' still holds an instance of one transition target. + int transitions_after = CountMapTransitions( + Map::cast(root->map()->GetBackPointer())); + CHECK_EQ(1, transitions_after); +} +#endif // DEBUG + + TEST(Regress2143a) { i::FLAG_collect_maps = true; i::FLAG_incremental_marking = true; @@ -2751,6 +2897,7 @@ TEST(Regress2143b) { TEST(ReleaseOverReservedPages) { + if (FLAG_never_compact) return; i::FLAG_trace_gc = true; // The optimizer can allocate stuff, messing up the test. i::FLAG_crankshaft = false; @@ -2808,7 +2955,7 @@ TEST(Regress2237) { // Generate a parent that lives in new-space. 
v8::HandleScope inner_scope(CcTest::isolate()); const char* c = "This text is long enough to trigger sliced strings."; - Handle<String> s = factory->NewStringFromAscii(CStrVector(c)); + Handle<String> s = factory->NewStringFromAsciiChecked(c); CHECK(s->IsSeqOneByteString()); CHECK(CcTest::heap()->InNewSpace(*s)); @@ -2852,7 +2999,7 @@ TEST(Regress2211) { v8::Handle<v8::String> value = v8_str("val string"); Smi* hash = Smi::FromInt(321); - Heap* heap = CcTest::heap(); + Factory* factory = CcTest::i_isolate()->factory(); for (int i = 0; i < 2; i++) { // Store identity hash first and common hidden property second. @@ -2868,7 +3015,7 @@ TEST(Regress2211) { // Check values. CHECK_EQ(hash, - internal_obj->GetHiddenProperty(heap->identity_hash_string())); + internal_obj->GetHiddenProperty(factory->identity_hash_string())); CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string")))); // Check size. @@ -2910,8 +3057,7 @@ TEST(IncrementalMarkingClearsTypeFeedbackInfo) { *v8::Handle<v8::Function>::Cast( CcTest::global()->Get(v8_str("f")))); - Handle<FixedArray> feedback_vector(TypeFeedbackInfo::cast( - f->shared()->code()->type_feedback_info())->feedback_vector()); + Handle<FixedArray> feedback_vector(f->shared()->feedback_vector()); CHECK_EQ(2, feedback_vector->length()); CHECK(feedback_vector->get(0)->IsJSFunction()); @@ -3334,8 +3480,10 @@ static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, *space->allocation_limit_address() - *space->allocation_top_address()); CHECK(space_remaining >= extra_bytes); int new_linear_size = space_remaining - extra_bytes; - v8::internal::MaybeObject* maybe = space->AllocateRaw(new_linear_size); - v8::internal::FreeListNode* node = v8::internal::FreeListNode::cast(maybe); + v8::internal::AllocationResult allocation = + space->AllocateRaw(new_linear_size); + v8::internal::FreeListNode* node = + v8::internal::FreeListNode::cast(allocation.ToObjectChecked()); node->set_size(space->heap(), new_linear_size); } @@ -3393,14 +3541,13 @@ TEST(Regress169928) { // We need filler the size of AllocationMemento object, plus an extra // fill pointer value. - MaybeObject* maybe_object = CcTest::heap()->AllocateRaw( - AllocationMemento::kSize + kPointerSize, NEW_SPACE, OLD_POINTER_SPACE); - Object* obj = NULL; - CHECK(maybe_object->ToObject(&obj)); - Address addr_obj = reinterpret_cast<Address>( - reinterpret_cast<byte*>(obj - kHeapObjectTag)); - CcTest::heap()->CreateFillerObjectAt(addr_obj, - AllocationMemento::kSize + kPointerSize); + HeapObject* obj = NULL; + AllocationResult allocation = CcTest::heap()->new_space()->AllocateRaw( + AllocationMemento::kSize + kPointerSize); + CHECK(allocation.To(&obj)); + Address addr_obj = obj->address(); + CcTest::heap()->CreateFillerObjectAt( + addr_obj, AllocationMemento::kSize + kPointerSize); // Give the array a name, making sure not to allocate strings. v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array); @@ -3414,6 +3561,7 @@ TEST(Regress169928) { TEST(Regress168801) { + if (i::FLAG_never_compact) return; i::FLAG_always_compact = true; i::FLAG_cache_optimized_code = false; i::FLAG_allow_natives_syntax = true; @@ -3470,6 +3618,7 @@ TEST(Regress168801) { TEST(Regress173458) { + if (i::FLAG_never_compact) return; i::FLAG_always_compact = true; i::FLAG_cache_optimized_code = false; i::FLAG_allow_natives_syntax = true; @@ -3513,10 +3662,8 @@ TEST(Regress173458) { // explicitly enqueued. SimulateIncrementalMarking(); -#ifdef ENABLE_DEBUGGER_SUPPORT // Now enable the debugger which in turn will disable code flushing. 
CHECK(isolate->debug()->Load()); -#endif // ENABLE_DEBUGGER_SUPPORT // This cycle will bust the heap and subsequent cycles will go ballistic. heap->CollectAllGarbage(Heap::kNoGCFlags); @@ -3848,6 +3995,114 @@ TEST(NextCodeLinkIsWeak2) { } +static bool weak_ic_cleared = false; + +static void ClearWeakIC(const v8::WeakCallbackData<v8::Object, void>& data) { + printf("clear weak is called\n"); + weak_ic_cleared = true; + v8::Persistent<v8::Value>* p = + reinterpret_cast<v8::Persistent<v8::Value>*>(data.GetParameter()); + CHECK(p->IsNearDeath()); + p->Reset(); +} + + +// Checks that the value returned by execution of the source is weak. +void CheckWeakness(const char* source) { + i::FLAG_stress_compaction = false; + CcTest::InitializeVM(); + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope scope(isolate); + v8::Persistent<v8::Object> garbage; + { + v8::HandleScope scope(isolate); + garbage.Reset(isolate, CompileRun(source)->ToObject()); + } + weak_ic_cleared = false; + garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC); + Heap* heap = CcTest::i_isolate()->heap(); + heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); + CHECK(weak_ic_cleared); +} + + +// Each of the following "weak IC" tests creates an IC that embeds a map with +// the prototype pointing to _proto_ and checks that the _proto_ dies on GC. +TEST(WeakMapInMonomorphicLoadIC) { + CheckWeakness("function loadIC(obj) {" + " return obj.name;" + "}" + " (function() {" + " var proto = {'name' : 'weak'};" + " var obj = Object.create(proto);" + " loadIC(obj);" + " loadIC(obj);" + " loadIC(obj);" + " return proto;" + " })();"); +} + + +TEST(WeakMapInMonomorphicKeyedLoadIC) { + CheckWeakness("function keyedLoadIC(obj, field) {" + " return obj[field];" + "}" + " (function() {" + " var proto = {'name' : 'weak'};" + " var obj = Object.create(proto);" + " keyedLoadIC(obj, 'name');" + " keyedLoadIC(obj, 'name');" + " keyedLoadIC(obj, 'name');" + " return proto;" + " })();"); +} + + +TEST(WeakMapInMonomorphicStoreIC) { + CheckWeakness("function storeIC(obj, value) {" + " obj.name = value;" + "}" + " (function() {" + " var proto = {'name' : 'weak'};" + " var obj = Object.create(proto);" + " storeIC(obj, 'x');" + " storeIC(obj, 'x');" + " storeIC(obj, 'x');" + " return proto;" + " })();"); +} + + +TEST(WeakMapInMonomorphicKeyedStoreIC) { + CheckWeakness("function keyedStoreIC(obj, field, value) {" + " obj[field] = value;" + "}" + " (function() {" + " var proto = {'name' : 'weak'};" + " var obj = Object.create(proto);" + " keyedStoreIC(obj, 'x');" + " keyedStoreIC(obj, 'x');" + " keyedStoreIC(obj, 'x');" + " return proto;" + " })();"); +} + + +TEST(WeakMapInMonomorphicCompareNilIC) { + CheckWeakness("function compareNilIC(obj) {" + " return obj == null;" + "}" + " (function() {" + " var proto = {'name' : 'weak'};" + " var obj = Object.create(proto);" + " compareNilIC(obj);" + " compareNilIC(obj);" + " compareNilIC(obj);" + " return proto;" + " })();"); +} + + #ifdef DEBUG TEST(AddInstructionChangesNewSpacePromotion) { i::FLAG_allow_natives_syntax = true; @@ -3913,3 +4168,57 @@ TEST(CEntryStubOOM) { } #endif // DEBUG + + +static void InterruptCallback357137(v8::Isolate* isolate, void* data) { } + + +static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) { + CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL); +} + + +TEST(Regress357137) { + CcTest::InitializeVM(); + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope hscope(isolate); + v8::Handle<v8::ObjectTemplate> global 
=v8::ObjectTemplate::New(isolate); + global->Set(v8::String::NewFromUtf8(isolate, "interrupt"), + v8::FunctionTemplate::New(isolate, RequestInterrupt)); + v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global); + ASSERT(!context.IsEmpty()); + v8::Context::Scope cscope(context); + + v8::Local<v8::Value> result = CompileRun( + "var locals = '';" + "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';" + "eval('function f() {' + locals + 'return function() { return v0; }; }');" + "interrupt();" // This triggers a fake stack overflow in f. + "f()()"); + CHECK_EQ(42.0, result->ToNumber()->Value()); +} + + +TEST(ArrayShiftSweeping) { + i::FLAG_expose_gc = true; + CcTest::InitializeVM(); + v8::HandleScope scope(CcTest::isolate()); + Isolate* isolate = CcTest::i_isolate(); + Heap* heap = isolate->heap(); + + v8::Local<v8::Value> result = CompileRun( + "var array = new Array(40000);" + "var tmp = new Array(100000);" + "array[0] = 10;" + "gc();" + "array.shift();" + "array;"); + + Handle<JSObject> o = + v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result)); + CHECK(heap->InOldPointerSpace(o->elements())); + CHECK(heap->InOldPointerSpace(*o)); + Page* page = Page::FromAddress(o->elements()->address()); + CHECK(page->WasSwept() || + Marking::IsBlack(Marking::MarkBitFrom(o->elements()))); +} diff --git a/deps/v8/test/cctest/test-liveedit.cc b/deps/v8/test/cctest/test-liveedit.cc index 65a001d4b..1c64108c8 100644 --- a/deps/v8/test/cctest/test-liveedit.cc +++ b/deps/v8/test/cctest/test-liveedit.cc @@ -25,8 +25,6 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -#ifdef ENABLE_DEBUGGER_SUPPORT - #include <stdlib.h> #include "v8.h" @@ -177,5 +175,3 @@ TEST(LiveEditDiffer) { CompareStrings("abbabababababaaabbabababababbabbbbbbbababa", "bbbbabababbbabababbbabababababbabbababa"); } - -#endif // ENABLE_DEBUGGER_SUPPORT diff --git a/deps/v8/test/cctest/test-log.cc b/deps/v8/test/cctest/test-log.cc index 42af0a555..e6ed75e64 100644 --- a/deps/v8/test/cctest/test-log.cc +++ b/deps/v8/test/cctest/test-log.cc @@ -39,8 +39,8 @@ #include "log-utils.h" #include "cpu-profiler.h" #include "natives.h" +#include "utils.h" #include "v8threads.h" -#include "v8utils.h" #include "cctest.h" #include "vm-state-inl.h" diff --git a/deps/v8/test/cctest/test-macro-assembler-arm.cc b/deps/v8/test/cctest/test-macro-assembler-arm.cc index d40b8a50c..8aed4c27b 100644 --- a/deps/v8/test/cctest/test-macro-assembler-arm.cc +++ b/deps/v8/test/cctest/test-macro-assembler-arm.cc @@ -91,13 +91,10 @@ TEST(CopyBytes) { CodeDesc desc; masm->GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); - F f = FUNCTION_CAST<F>(Code::cast(code)->entry()); + F f = FUNCTION_CAST<F>(code->entry()); // Initialise source data with non-zero bytes. for (int i = 0; i < data_size; i++) { @@ -218,14 +215,11 @@ TEST(LoadAndStoreWithRepresentation) { CodeDesc desc; masm->GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); // Call the function from C++. 
- F5 f = FUNCTION_CAST<F5>(Code::cast(code)->entry()); + F5 f = FUNCTION_CAST<F5>(code->entry()); CHECK_EQ(0, CALL_GENERATED_CODE(f, 0, 0, 0, 0, 0)); } diff --git a/deps/v8/test/cctest/test-macro-assembler-mips.cc b/deps/v8/test/cctest/test-macro-assembler-mips.cc index 3154aac59..a5045a8f0 100644 --- a/deps/v8/test/cctest/test-macro-assembler-mips.cc +++ b/deps/v8/test/cctest/test-macro-assembler-mips.cc @@ -91,13 +91,10 @@ TEST(CopyBytes) { CodeDesc desc; masm->GetCode(&desc); - Object* code = isolate->heap()->CreateCode( - desc, - Code::ComputeFlags(Code::STUB), - Handle<Code>())->ToObjectChecked(); - CHECK(code->IsCode()); + Handle<Code> code = isolate->factory()->NewCode( + desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); - ::F f = FUNCTION_CAST< ::F>(Code::cast(code)->entry()); + ::F f = FUNCTION_CAST< ::F>(code->entry()); // Initialise source data with non-zero bytes. for (int i = 0; i < data_size; i++) { diff --git a/deps/v8/test/cctest/test-macro-assembler-x64.cc b/deps/v8/test/cctest/test-macro-assembler-x64.cc index f29daccea..609bc6995 100644 --- a/deps/v8/test/cctest/test-macro-assembler-x64.cc +++ b/deps/v8/test/cctest/test-macro-assembler-x64.cc @@ -1516,7 +1516,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) { __ Move(rcx, Smi::FromInt(x)); SmiIndex index = masm->SmiToIndex(rdx, rcx, i); ASSERT(index.reg.is(rcx) || index.reg.is(rdx)); - __ shl(index.reg, Immediate(index.scale)); + __ shlq(index.reg, Immediate(index.scale)); __ Set(r8, static_cast<intptr_t>(x) << i); __ cmpq(index.reg, r8); __ j(not_equal, exit); @@ -1524,7 +1524,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) { __ Move(rcx, Smi::FromInt(x)); index = masm->SmiToIndex(rcx, rcx, i); ASSERT(index.reg.is(rcx)); - __ shl(rcx, Immediate(index.scale)); + __ shlq(rcx, Immediate(index.scale)); __ Set(r8, static_cast<intptr_t>(x) << i); __ cmpq(rcx, r8); __ j(not_equal, exit); @@ -1533,7 +1533,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) { __ Move(rcx, Smi::FromInt(x)); index = masm->SmiToNegativeIndex(rdx, rcx, i); ASSERT(index.reg.is(rcx) || index.reg.is(rdx)); - __ shl(index.reg, Immediate(index.scale)); + __ shlq(index.reg, Immediate(index.scale)); __ Set(r8, static_cast<intptr_t>(-x) << i); __ cmpq(index.reg, r8); __ j(not_equal, exit); @@ -1541,7 +1541,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) { __ Move(rcx, Smi::FromInt(x)); index = masm->SmiToNegativeIndex(rcx, rcx, i); ASSERT(index.reg.is(rcx)); - __ shl(rcx, Immediate(index.scale)); + __ shlq(rcx, Immediate(index.scale)); __ Set(r8, static_cast<intptr_t>(-x) << i); __ cmpq(rcx, r8); __ j(not_equal, exit); diff --git a/deps/v8/test/cctest/test-mark-compact.cc b/deps/v8/test/cctest/test-mark-compact.cc index 0200129b1..5f13bd25a 100644 --- a/deps/v8/test/cctest/test-mark-compact.cc +++ b/deps/v8/test/cctest/test-mark-compact.cc @@ -39,6 +39,7 @@ #include "v8.h" +#include "full-codegen.h" #include "global-handles.h" #include "snapshot.h" #include "cctest.h" @@ -75,8 +76,8 @@ TEST(MarkingDeque) { TEST(Promotion) { CcTest::InitializeVM(); - Heap* heap = CcTest::heap(); - heap->ConfigureHeap(2*256*KB, 1*MB, 1*MB); + TestHeap* heap = CcTest::test_heap(); + heap->ConfigureHeap(2*256*KB, 1*MB, 1*MB, 0); v8::HandleScope sc(CcTest::isolate()); @@ -84,7 +85,7 @@ TEST(Promotion) { int array_length = (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / (4 * kPointerSize); - Object* obj = heap->AllocateFixedArray(array_length)->ToObjectChecked(); + 
Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked(); Handle<FixedArray> array(FixedArray::cast(obj)); // Array should be in the new space. @@ -100,8 +101,8 @@ TEST(Promotion) { TEST(NoPromotion) { CcTest::InitializeVM(); - Heap* heap = CcTest::heap(); - heap->ConfigureHeap(2*256*KB, 1*MB, 1*MB); + TestHeap* heap = CcTest::test_heap(); + heap->ConfigureHeap(2*256*KB, 1*MB, 1*MB, 0); v8::HandleScope sc(CcTest::isolate()); @@ -109,7 +110,7 @@ TEST(NoPromotion) { int array_length = (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / (2 * kPointerSize); - Object* obj = heap->AllocateFixedArray(array_length)->ToObjectChecked(); + Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked(); Handle<FixedArray> array(FixedArray::cast(obj)); // Array should be in the new space. @@ -127,7 +128,8 @@ TEST(MarkCompactCollector) { FLAG_incremental_marking = false; CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); - Heap* heap = isolate->heap(); + TestHeap* heap = CcTest::test_heap(); + Factory* factory = isolate->factory(); v8::HandleScope sc(CcTest::isolate()); Handle<GlobalObject> global(isolate->context()->global_object()); @@ -137,75 +139,63 @@ TEST(MarkCompactCollector) { // keep allocating garbage in new space until it fails const int ARRAY_SIZE = 100; - Object* array; - MaybeObject* maybe_array; + AllocationResult allocation; do { - maybe_array = heap->AllocateFixedArray(ARRAY_SIZE); - } while (maybe_array->ToObject(&array)); + allocation = heap->AllocateFixedArray(ARRAY_SIZE); + } while (!allocation.IsRetry()); heap->CollectGarbage(NEW_SPACE, "trigger 2"); - - array = heap->AllocateFixedArray(ARRAY_SIZE)->ToObjectChecked(); + heap->AllocateFixedArray(ARRAY_SIZE).ToObjectChecked(); // keep allocating maps until it fails - Object* mapp; - MaybeObject* maybe_mapp; do { - maybe_mapp = heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); - } while (maybe_mapp->ToObject(&mapp)); + allocation = heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); + } while (!allocation.IsRetry()); heap->CollectGarbage(MAP_SPACE, "trigger 3"); - mapp = heap->AllocateMap(JS_OBJECT_TYPE, - JSObject::kHeaderSize)->ToObjectChecked(); - - // allocate a garbage - String* func_name = String::cast( - heap->InternalizeUtf8String("theFunction")->ToObjectChecked()); - SharedFunctionInfo* function_share = SharedFunctionInfo::cast( - heap->AllocateSharedFunctionInfo(func_name)->ToObjectChecked()); - JSFunction* function = JSFunction::cast( - heap->AllocateFunction(*isolate->sloppy_function_map(), - function_share, - heap->undefined_value())->ToObjectChecked()); - Map* initial_map = - Map::cast(heap->AllocateMap(JS_OBJECT_TYPE, - JSObject::kHeaderSize)->ToObjectChecked()); - function->set_initial_map(initial_map); - JSReceiver::SetProperty( - global, handle(func_name), handle(function), NONE, SLOPPY); - - JSObject* obj = JSObject::cast( - heap->AllocateJSObject(function)->ToObjectChecked()); + heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize).ToObjectChecked(); + + { HandleScope scope(isolate); + // allocate a garbage + Handle<String> func_name = factory->InternalizeUtf8String("theFunction"); + Handle<JSFunction> function = factory->NewFunctionWithPrototype( + func_name, factory->undefined_value()); + Handle<Map> initial_map = factory->NewMap( + JS_OBJECT_TYPE, JSObject::kHeaderSize); + function->set_initial_map(*initial_map); + JSReceiver::SetProperty(global, func_name, function, NONE, SLOPPY).Check(); + + factory->NewJSObject(function); + } + 
heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 4"); - func_name = String::cast( - heap->InternalizeUtf8String("theFunction")->ToObjectChecked()); - CHECK(JSReceiver::HasLocalProperty(global, handle(func_name))); - Object* func_value = isolate->context()->global_object()-> - GetProperty(func_name)->ToObjectChecked(); - CHECK(func_value->IsJSFunction()); - function = JSFunction::cast(func_value); - - obj = JSObject::cast(heap->AllocateJSObject(function)->ToObjectChecked()); - String* obj_name = - String::cast(heap->InternalizeUtf8String("theObject")->ToObjectChecked()); - JSReceiver::SetProperty(global, handle(obj_name), handle(obj), NONE, SLOPPY); - String* prop_name = - String::cast(heap->InternalizeUtf8String("theSlot")->ToObjectChecked()); - Handle<Smi> twenty_three(Smi::FromInt(23), isolate); - JSReceiver::SetProperty( - handle(obj), handle(prop_name), twenty_three, NONE, SLOPPY); + { HandleScope scope(isolate); + Handle<String> func_name = factory->InternalizeUtf8String("theFunction"); + CHECK(JSReceiver::HasLocalProperty(global, func_name)); + Handle<Object> func_value = + Object::GetProperty(global, func_name).ToHandleChecked(); + CHECK(func_value->IsJSFunction()); + Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); + Handle<JSObject> obj = factory->NewJSObject(function); + + Handle<String> obj_name = factory->InternalizeUtf8String("theObject"); + JSReceiver::SetProperty(global, obj_name, obj, NONE, SLOPPY).Check(); + Handle<String> prop_name = factory->InternalizeUtf8String("theSlot"); + Handle<Smi> twenty_three(Smi::FromInt(23), isolate); + JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, SLOPPY).Check(); + } heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 5"); - obj_name = - String::cast(heap->InternalizeUtf8String("theObject")->ToObjectChecked()); - CHECK(JSReceiver::HasLocalProperty(global, handle(obj_name))); - CHECK(isolate->context()->global_object()-> - GetProperty(obj_name)->ToObjectChecked()->IsJSObject()); - obj = JSObject::cast(isolate->context()->global_object()-> - GetProperty(obj_name)->ToObjectChecked()); - prop_name = - String::cast(heap->InternalizeUtf8String("theSlot")->ToObjectChecked()); - CHECK(obj->GetProperty(prop_name) == Smi::FromInt(23)); + { HandleScope scope(isolate); + Handle<String> obj_name = factory->InternalizeUtf8String("theObject"); + CHECK(JSReceiver::HasLocalProperty(global, obj_name)); + Handle<Object> object = + Object::GetProperty(global, obj_name).ToHandleChecked(); + CHECK(object->IsJSObject()); + Handle<String> prop_name = factory->InternalizeUtf8String("theSlot"); + CHECK_EQ(*Object::GetProperty(object, prop_name).ToHandleChecked(), + Smi::FromInt(23)); + } } @@ -260,16 +250,16 @@ TEST(ObjectGroups) { FLAG_incremental_marking = false; CcTest::InitializeVM(); GlobalHandles* global_handles = CcTest::i_isolate()->global_handles(); - Heap* heap = CcTest::heap(); + TestHeap* heap = CcTest::test_heap(); NumberOfWeakCalls = 0; v8::HandleScope handle_scope(CcTest::isolate()); Handle<Object> g1s1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); Handle<Object> g1s2 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); Handle<Object> g1c1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); std::pair<Handle<Object>*, int> 
g1s1_and_id(&g1s1, 1234); GlobalHandles::MakeWeak(g1s1.location(), reinterpret_cast<void*>(&g1s1_and_id), @@ -284,11 +274,11 @@ TEST(ObjectGroups) { &WeakPointerCallback); Handle<Object> g2s1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); Handle<Object> g2s2 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); Handle<Object> g2c1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); std::pair<Handle<Object>*, int> g2s1_and_id(&g2s1, 1234); GlobalHandles::MakeWeak(g2s1.location(), reinterpret_cast<void*>(&g2s1_and_id), @@ -399,7 +389,7 @@ TEST(EmptyObjectGroups) { v8::HandleScope handle_scope(CcTest::isolate()); Handle<Object> object = global_handles->Create( - CcTest::heap()->AllocateFixedArray(1)->ToObjectChecked()); + CcTest::test_heap()->AllocateFixedArray(1).ToObjectChecked()); TestRetainedObjectInfo info; global_handles->AddObjectGroup(NULL, 0, &info); @@ -491,36 +481,6 @@ static intptr_t MemoryInUse() { } -TEST(BootUpMemoryUse) { - intptr_t initial_memory = MemoryInUse(); - // Avoid flakiness. - FLAG_crankshaft = false; - FLAG_concurrent_osr = false; - FLAG_concurrent_recompilation = false; - - // Only Linux has the proc filesystem and only if it is mapped. If it's not - // there we just skip the test. - if (initial_memory >= 0) { - CcTest::InitializeVM(); - intptr_t delta = MemoryInUse() - initial_memory; - printf("delta: %" V8_PTR_PREFIX "d kB\n", delta / 1024); - if (sizeof(initial_memory) == 8) { // 64-bit. - if (v8::internal::Snapshot::IsEnabled()) { - CHECK_LE(delta, 4000 * 1024); - } else { - CHECK_LE(delta, 4500 * 1024); - } - } else { // 32-bit. - if (v8::internal::Snapshot::IsEnabled()) { - CHECK_LE(delta, 3100 * 1024); - } else { - CHECK_LE(delta, 3450 * 1024); - } - } - } -} - - intptr_t ShortLivingIsolate() { v8::Isolate* isolate = v8::Isolate::New(); { v8::Isolate::Scope isolate_scope(isolate); diff --git a/deps/v8/test/cctest/test-mementos.cc b/deps/v8/test/cctest/test-mementos.cc index 1dc38f9af..a377b4a4c 100644 --- a/deps/v8/test/cctest/test-mementos.cc +++ b/deps/v8/test/cctest/test-mementos.cc @@ -39,7 +39,8 @@ static void SetUpNewSpaceWithPoisonedMementoAtTop() { heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); // Allocate a string, the GC may suspect a memento behind the string. 
- Handle<SeqOneByteString> string = isolate->factory()->NewRawOneByteString(12); + Handle<SeqOneByteString> string = + isolate->factory()->NewRawOneByteString(12).ToHandleChecked(); CHECK(*string); // Create an allocation memento behind the string with a garbage allocation diff --git a/deps/v8/test/cctest/test-microtask-delivery.cc b/deps/v8/test/cctest/test-microtask-delivery.cc index 0172726af..e6f38b79b 100644 --- a/deps/v8/test/cctest/test-microtask-delivery.cc +++ b/deps/v8/test/cctest/test-microtask-delivery.cc @@ -95,7 +95,7 @@ TEST(MicrotaskPerIsolateState) { HarmonyIsolate isolate; HandleScope scope(isolate.GetIsolate()); LocalContext context1(isolate.GetIsolate()); - V8::SetAutorunMicrotasks(isolate.GetIsolate(), false); + isolate.GetIsolate()->SetAutorunMicrotasks(false); CompileRun( "var obj = { calls: 0 };"); Handle<Value> obj = CompileRun("obj"); @@ -129,7 +129,7 @@ TEST(MicrotaskPerIsolateState) { LocalContext context4(isolate.GetIsolate()); context4->Global()->Set(String::NewFromUtf8(isolate.GetIsolate(), "obj"), obj); - V8::RunMicrotasks(isolate.GetIsolate()); + isolate.GetIsolate()->RunMicrotasks(); CHECK_EQ(2, CompileRun("obj.calls")->Int32Value()); } } diff --git a/deps/v8/test/cctest/test-object-observe.cc b/deps/v8/test/cctest/test-object-observe.cc index 6bde5b37e..a7b346fc6 100644 --- a/deps/v8/test/cctest/test-object-observe.cc +++ b/deps/v8/test/cctest/test-object-observe.cc @@ -36,6 +36,10 @@ namespace i = v8::internal; TEST(PerIsolateState) { HandleScope scope(CcTest::isolate()); LocalContext context1(CcTest::isolate()); + + Local<Value> foo = v8_str("foo"); + context1->SetSecurityToken(foo); + CompileRun( "var count = 0;" "var calls = 0;" @@ -49,6 +53,7 @@ TEST(PerIsolateState) { Handle<Value> notify_fun2; { LocalContext context2(CcTest::isolate()); + context2->SetSecurityToken(foo); context2->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), obj); notify_fun2 = CompileRun( @@ -57,6 +62,7 @@ TEST(PerIsolateState) { Handle<Value> notify_fun3; { LocalContext context3(CcTest::isolate()); + context3->SetSecurityToken(foo); context3->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), obj); notify_fun3 = CompileRun( @@ -64,6 +70,7 @@ TEST(PerIsolateState) { } { LocalContext context4(CcTest::isolate()); + context4->SetSecurityToken(foo); context4->Global()->Set( String::NewFromUtf8(CcTest::isolate(), "observer"), observer); context4->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "fun1"), @@ -209,59 +216,6 @@ TEST(ObjectHashTableGrowth) { } -TEST(GlobalObjectObservation) { - LocalContext context(CcTest::isolate()); - HandleScope scope(CcTest::isolate()); - Handle<Object> global_proxy = context->Global(); - CompileRun( - "var records = [];" - "var global = this;" - "Object.observe(global, function(r) { [].push.apply(records, r) });" - "global.foo = 'hello';"); - CHECK_EQ(1, CompileRun("records.length")->Int32Value()); - CHECK(global_proxy->StrictEquals(CompileRun("records[0].object"))); - - // Detached, mutating the proxy has no effect. - context->DetachGlobal(); - CompileRun("global.bar = 'goodbye';"); - CHECK_EQ(1, CompileRun("records.length")->Int32Value()); - CompileRun("this.baz = 'goodbye';"); - CHECK_EQ(1, CompileRun("records.length")->Int32Value()); - - // Attached to a different context, should not leak mutations - // to the old context. 
- context->DetachGlobal(); - { - LocalContext context2(CcTest::isolate()); - CompileRun( - "var records2 = [];" - "var global = this;" - "Object.observe(this, function(r) { [].push.apply(records2, r) });" - "this.v1 = 'context2';"); - context2->DetachGlobal(); - CompileRun( - "global.v2 = 'context2';" - "this.v3 = 'context2';"); - CHECK_EQ(1, CompileRun("records2.length")->Int32Value()); - } - CHECK_EQ(1, CompileRun("records.length")->Int32Value()); - - // Attaching by passing to Context::New - { - // Delegates to Context::New - LocalContext context3( - CcTest::isolate(), NULL, Handle<ObjectTemplate>(), global_proxy); - CompileRun( - "var records3 = [];" - "Object.observe(this, function(r) { [].push.apply(records3, r) });" - "this.qux = 'context3';"); - CHECK_EQ(1, CompileRun("records3.length")->Int32Value()); - CHECK(global_proxy->StrictEquals(CompileRun("records3[0].object"))); - } - CHECK_EQ(1, CompileRun("records.length")->Int32Value()); -} - - struct RecordExpectation { Handle<Value> object; const char* type; @@ -407,18 +361,19 @@ TEST(ObservationWeakMap) { "Object.observe(obj, function(){});" "Object.getNotifier(obj);" "obj = null;"); - i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(CcTest::isolate()); + i::Isolate* i_isolate = CcTest::i_isolate(); i::Handle<i::JSObject> observation_state = i_isolate->factory()->observation_state(); i::Handle<i::JSWeakMap> callbackInfoMap = - i::Handle<i::JSWeakMap>::cast( - i::GetProperty(observation_state, "callbackInfoMap")); + i::Handle<i::JSWeakMap>::cast(i::Object::GetProperty( + i_isolate, observation_state, "callbackInfoMap").ToHandleChecked()); i::Handle<i::JSWeakMap> objectInfoMap = - i::Handle<i::JSWeakMap>::cast( - i::GetProperty(observation_state, "objectInfoMap")); + i::Handle<i::JSWeakMap>::cast(i::Object::GetProperty( + i_isolate, observation_state, "objectInfoMap").ToHandleChecked()); i::Handle<i::JSWeakMap> notifierObjectInfoMap = - i::Handle<i::JSWeakMap>::cast( - i::GetProperty(observation_state, "notifierObjectInfoMap")); + i::Handle<i::JSWeakMap>::cast(i::Object::GetProperty( + i_isolate, observation_state, "notifierObjectInfoMap") + .ToHandleChecked()); CHECK_EQ(1, NumberOfElements(callbackInfoMap)); CHECK_EQ(1, NumberOfElements(objectInfoMap)); CHECK_EQ(1, NumberOfElements(notifierObjectInfoMap)); @@ -429,300 +384,329 @@ TEST(ObservationWeakMap) { } -static bool NamedAccessAlwaysAllowed(Local<Object>, Local<Value>, AccessType, - Local<Value>) { - return true; +static int TestObserveSecurity(Handle<Context> observer_context, + Handle<Context> object_context, + Handle<Context> mutation_context) { + Context::Scope observer_scope(observer_context); + CompileRun("var records = null;" + "var observer = function(r) { records = r };"); + Handle<Value> observer = CompileRun("observer"); + { + Context::Scope object_scope(object_context); + object_context->Global()->Set( + String::NewFromUtf8(CcTest::isolate(), "observer"), observer); + CompileRun("var obj = {};" + "obj.length = 0;" + "Object.observe(obj, observer," + "['add', 'update', 'delete','reconfigure','splice']" + ");"); + Handle<Value> obj = CompileRun("obj"); + { + Context::Scope mutation_scope(mutation_context); + mutation_context->Global()->Set( + String::NewFromUtf8(CcTest::isolate(), "obj"), obj); + CompileRun("obj.foo = 'bar';" + "obj.foo = 'baz';" + "delete obj.foo;" + "Object.defineProperty(obj, 'bar', {value: 'bot'});" + "Array.prototype.push.call(obj, 1, 2, 3);" + "Array.prototype.splice.call(obj, 1, 2, 2, 4);" + "Array.prototype.pop.call(obj);" + 
"Array.prototype.shift.call(obj);"); + } + } + return CompileRun("records ? records.length : 0")->Int32Value(); } -static bool IndexedAccessAlwaysAllowed(Local<Object>, uint32_t, AccessType, - Local<Value>) { - return true; +TEST(ObserverSecurityAAA) { + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope scope(isolate); + v8::Local<Context> contextA = Context::New(isolate); + CHECK_EQ(8, TestObserveSecurity(contextA, contextA, contextA)); } -static AccessType g_access_block_type = ACCESS_GET; -static const uint32_t kBlockedContextIndex = 1337; +TEST(ObserverSecurityA1A2A3) { + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope scope(isolate); + + v8::Local<Context> contextA1 = Context::New(isolate); + v8::Local<Context> contextA2 = Context::New(isolate); + v8::Local<Context> contextA3 = Context::New(isolate); + Local<Value> foo = v8_str("foo"); + contextA1->SetSecurityToken(foo); + contextA2->SetSecurityToken(foo); + contextA3->SetSecurityToken(foo); -static bool NamedAccessAllowUnlessBlocked(Local<Object> host, - Local<Value> key, - AccessType type, - Local<Value> data) { - if (type != g_access_block_type) return true; - v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>( - Utils::OpenHandle(*host)->GetIsolate()); - Handle<Object> global = isolate->GetCurrentContext()->Global(); - if (!global->Has(kBlockedContextIndex)) return true; - return !key->IsString() || !key->Equals(data); + CHECK_EQ(8, TestObserveSecurity(contextA1, contextA2, contextA3)); } -static bool IndexedAccessAllowUnlessBlocked(Local<Object> host, - uint32_t index, - AccessType type, - Local<Value> data) { - if (type != g_access_block_type) return true; - v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>( - Utils::OpenHandle(*host)->GetIsolate()); - Handle<Object> global = isolate->GetCurrentContext()->Global(); - if (!global->Has(kBlockedContextIndex)) return true; - return index != data->Uint32Value(); +TEST(ObserverSecurityAAB) { + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope scope(isolate); + v8::Local<Context> contextA = Context::New(isolate); + v8::Local<Context> contextB = Context::New(isolate); + CHECK_EQ(0, TestObserveSecurity(contextA, contextA, contextB)); } -static bool BlockAccessKeys(Local<Object> host, Local<Value> key, - AccessType type, Local<Value>) { - v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>( - Utils::OpenHandle(*host)->GetIsolate()); - Handle<Object> global = isolate->GetCurrentContext()->Global(); - return type != ACCESS_KEYS || !global->Has(kBlockedContextIndex); +TEST(ObserverSecurityA1A2B) { + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope scope(isolate); + + v8::Local<Context> contextA1 = Context::New(isolate); + v8::Local<Context> contextA2 = Context::New(isolate); + v8::Local<Context> contextB = Context::New(isolate); + + Local<Value> foo = v8_str("foo"); + contextA1->SetSecurityToken(foo); + contextA2->SetSecurityToken(foo); + + CHECK_EQ(0, TestObserveSecurity(contextA1, contextA2, contextB)); } -static Handle<Object> CreateAccessCheckedObject( - v8::Isolate* isolate, - NamedSecurityCallback namedCallback, - IndexedSecurityCallback indexedCallback, - Handle<Value> data = Handle<Value>()) { - Handle<ObjectTemplate> tmpl = ObjectTemplate::New(isolate); - tmpl->SetAccessCheckCallbacks(namedCallback, indexedCallback, data); - Handle<Object> instance = tmpl->NewInstance(); - Handle<Object> global = instance->CreationContext()->Global(); - global->Set(String::NewFromUtf8(isolate, "obj"), instance); - global->Set(kBlockedContextIndex, 
v8::True(isolate)); - return instance; +TEST(ObserverSecurityABA) { + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope scope(isolate); + v8::Local<Context> contextA = Context::New(isolate); + v8::Local<Context> contextB = Context::New(isolate); + CHECK_EQ(0, TestObserveSecurity(contextA, contextB, contextA)); } -TEST(NamedAccessCheck) { - const AccessType types[] = { ACCESS_GET, ACCESS_HAS }; - for (size_t i = 0; i < ARRAY_SIZE(types); ++i) { - HandleScope scope(CcTest::isolate()); - LocalContext context(CcTest::isolate()); - g_access_block_type = types[i]; - Handle<Object> instance = CreateAccessCheckedObject( - CcTest::isolate(), - NamedAccessAllowUnlessBlocked, - IndexedAccessAlwaysAllowed, - String::NewFromUtf8(CcTest::isolate(), "foo")); - CompileRun("var records = null;" - "var objNoCheck = {};" - "var observer = function(r) { records = r };" - "Object.observe(obj, observer);" - "Object.observe(objNoCheck, observer);"); - Handle<Value> obj_no_check = CompileRun("objNoCheck"); - { - LocalContext context2(CcTest::isolate()); - context2->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), - instance); - context2->Global()->Set( - String::NewFromUtf8(CcTest::isolate(), "objNoCheck"), - obj_no_check); - CompileRun("var records2 = null;" - "var observer2 = function(r) { records2 = r };" - "Object.observe(obj, observer2);" - "Object.observe(objNoCheck, observer2);" - "obj.foo = 'bar';" - "Object.defineProperty(obj, 'foo', {value: 5});" - "Object.defineProperty(obj, 'foo', {get: function(){}});" - "obj.bar = 'baz';" - "objNoCheck.baz = 'quux'"); - const RecordExpectation expected_records2[] = { - { instance, "add", "foo", Handle<Value>() }, - { instance, "update", "foo", - String::NewFromUtf8(CcTest::isolate(), "bar") }, - { instance, "reconfigure", "foo", - Number::New(CcTest::isolate(), 5) }, - { instance, "add", "bar", Handle<Value>() }, - { obj_no_check, "add", "baz", Handle<Value>() }, - }; - EXPECT_RECORDS(CompileRun("records2"), expected_records2); - } - const RecordExpectation expected_records[] = { - { instance, "add", "bar", Handle<Value>() }, - { obj_no_check, "add", "baz", Handle<Value>() } - }; - EXPECT_RECORDS(CompileRun("records"), expected_records); - } +TEST(ObserverSecurityA1BA2) { + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope scope(isolate); + v8::Local<Context> contextA1 = Context::New(isolate); + v8::Local<Context> contextA2 = Context::New(isolate); + v8::Local<Context> contextB = Context::New(isolate); + + Local<Value> foo = v8_str("foo"); + contextA1->SetSecurityToken(foo); + contextA2->SetSecurityToken(foo); + + CHECK_EQ(0, TestObserveSecurity(contextA1, contextB, contextA2)); } -TEST(IndexedAccessCheck) { - const AccessType types[] = { ACCESS_GET, ACCESS_HAS }; - for (size_t i = 0; i < ARRAY_SIZE(types); ++i) { - HandleScope scope(CcTest::isolate()); - LocalContext context(CcTest::isolate()); - g_access_block_type = types[i]; - Handle<Object> instance = CreateAccessCheckedObject( - CcTest::isolate(), NamedAccessAlwaysAllowed, - IndexedAccessAllowUnlessBlocked, Number::New(CcTest::isolate(), 7)); - CompileRun("var records = null;" - "var objNoCheck = {};" - "var observer = function(r) { records = r };" - "Object.observe(obj, observer);" - "Object.observe(objNoCheck, observer);"); - Handle<Value> obj_no_check = CompileRun("objNoCheck"); - { - LocalContext context2(CcTest::isolate()); - context2->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), - instance); - context2->Global()->Set( - 
String::NewFromUtf8(CcTest::isolate(), "objNoCheck"), - obj_no_check); - CompileRun("var records2 = null;" - "var observer2 = function(r) { records2 = r };" - "Object.observe(obj, observer2);" - "Object.observe(objNoCheck, observer2);" - "obj[7] = 'foo';" - "Object.defineProperty(obj, '7', {value: 5});" - "Object.defineProperty(obj, '7', {get: function(){}});" - "obj[8] = 'bar';" - "objNoCheck[42] = 'quux'"); - const RecordExpectation expected_records2[] = { - { instance, "add", "7", Handle<Value>() }, - { instance, "update", "7", - String::NewFromUtf8(CcTest::isolate(), "foo") }, - { instance, "reconfigure", "7", Number::New(CcTest::isolate(), 5) }, - { instance, "add", "8", Handle<Value>() }, - { obj_no_check, "add", "42", Handle<Value>() } - }; - EXPECT_RECORDS(CompileRun("records2"), expected_records2); - } - const RecordExpectation expected_records[] = { - { instance, "add", "8", Handle<Value>() }, - { obj_no_check, "add", "42", Handle<Value>() } - }; - EXPECT_RECORDS(CompileRun("records"), expected_records); +TEST(ObserverSecurityBAA) { + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope scope(isolate); + v8::Local<Context> contextA = Context::New(isolate); + v8::Local<Context> contextB = Context::New(isolate); + CHECK_EQ(0, TestObserveSecurity(contextB, contextA, contextA)); +} + + +TEST(ObserverSecurityBA1A2) { + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope scope(isolate); + v8::Local<Context> contextA1 = Context::New(isolate); + v8::Local<Context> contextA2 = Context::New(isolate); + v8::Local<Context> contextB = Context::New(isolate); + + Local<Value> foo = v8_str("foo"); + contextA1->SetSecurityToken(foo); + contextA2->SetSecurityToken(foo); + + CHECK_EQ(0, TestObserveSecurity(contextB, contextA1, contextA2)); +} + + +TEST(ObserverSecurityNotify) { + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope scope(isolate); + v8::Local<Context> contextA = Context::New(isolate); + v8::Local<Context> contextB = Context::New(isolate); + + Context::Scope scopeA(contextA); + CompileRun("var obj = {};" + "var recordsA = null;" + "var observerA = function(r) { recordsA = r };" + "Object.observe(obj, observerA);"); + Handle<Value> obj = CompileRun("obj"); + + { + Context::Scope scopeB(contextB); + contextB->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), obj); + CompileRun("var recordsB = null;" + "var observerB = function(r) { recordsB = r };" + "Object.observe(obj, observerB);"); + } + + CompileRun("var notifier = Object.getNotifier(obj);" + "notifier.notify({ type: 'update' });"); + CHECK_EQ(1, CompileRun("recordsA ? recordsA.length : 0")->Int32Value()); + + { + Context::Scope scopeB(contextB); + CHECK_EQ(0, CompileRun("recordsB ? 
recordsB.length : 0")->Int32Value()); } } -TEST(SpliceAccessCheck) { +TEST(HiddenPropertiesLeakage) { HandleScope scope(CcTest::isolate()); LocalContext context(CcTest::isolate()); - g_access_block_type = ACCESS_GET; - Handle<Object> instance = CreateAccessCheckedObject( - CcTest::isolate(), NamedAccessAlwaysAllowed, - IndexedAccessAllowUnlessBlocked, Number::New(CcTest::isolate(), 1)); - CompileRun("var records = null;" - "obj[1] = 'foo';" - "obj.length = 2;" - "var objNoCheck = {1: 'bar', length: 2};" - "observer = function(r) { records = r };" - "Array.observe(obj, observer);" - "Array.observe(objNoCheck, observer);"); - Handle<Value> obj_no_check = CompileRun("objNoCheck"); + CompileRun("var obj = {};" + "var records = null;" + "var observer = function(r) { records = r };" + "Object.observe(obj, observer);"); + Handle<Value> obj = + context->Global()->Get(String::NewFromUtf8(CcTest::isolate(), "obj")); + Handle<Object>::Cast(obj) + ->SetHiddenValue(String::NewFromUtf8(CcTest::isolate(), "foo"), + Null(CcTest::isolate())); + CompileRun(""); // trigger delivery + CHECK(CompileRun("records")->IsNull()); +} + + +TEST(GetNotifierFromOtherContext) { + HandleScope scope(CcTest::isolate()); + LocalContext context(CcTest::isolate()); + CompileRun("var obj = {};"); + Handle<Value> instance = CompileRun("obj"); { LocalContext context2(CcTest::isolate()); context2->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), instance); - context2->Global()->Set( - String::NewFromUtf8(CcTest::isolate(), "objNoCheck"), obj_no_check); - CompileRun("var records2 = null;" - "var observer2 = function(r) { records2 = r };" - "Array.observe(obj, observer2);" - "Array.observe(objNoCheck, observer2);" - // No one should hear about this: no splice records are emitted - // for access-checked objects - "[].push.call(obj, 5);" - "[].splice.call(obj, 1, 1);" - "[].pop.call(obj);" - "[].pop.call(objNoCheck);"); - // TODO(adamk): Extend EXPECT_RECORDS to be able to assert more things - // about splice records. For this test it's not so important since - // we just want to guarantee the machinery is in operation at all. 
- const RecordExpectation expected_records2[] = { - { obj_no_check, "splice", "", Handle<Value>() } - }; - EXPECT_RECORDS(CompileRun("records2"), expected_records2); + CHECK(CompileRun("Object.getNotifier(obj)")->IsNull()); } - const RecordExpectation expected_records[] = { - { obj_no_check, "splice", "", Handle<Value>() } - }; - EXPECT_RECORDS(CompileRun("records"), expected_records); } -TEST(DisallowAllForAccessKeys) { +TEST(GetNotifierFromOtherOrigin) { HandleScope scope(CcTest::isolate()); + Handle<Value> foo = String::NewFromUtf8(CcTest::isolate(), "foo"); + Handle<Value> bar = String::NewFromUtf8(CcTest::isolate(), "bar"); LocalContext context(CcTest::isolate()); - Handle<Object> instance = CreateAccessCheckedObject( - CcTest::isolate(), BlockAccessKeys, IndexedAccessAlwaysAllowed); - CompileRun("var records = null;" - "var objNoCheck = {};" - "var observer = function(r) { records = r };" - "Object.observe(obj, observer);" - "Object.observe(objNoCheck, observer);"); - Handle<Value> obj_no_check = CompileRun("objNoCheck"); + context->SetSecurityToken(foo); + CompileRun("var obj = {};"); + Handle<Value> instance = CompileRun("obj"); { LocalContext context2(CcTest::isolate()); + context2->SetSecurityToken(bar); context2->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), instance); - context2->Global()->Set( - String::NewFromUtf8(CcTest::isolate(), "objNoCheck"), obj_no_check); - CompileRun("var records2 = null;" - "var observer2 = function(r) { records2 = r };" - "Object.observe(obj, observer2);" - "Object.observe(objNoCheck, observer2);" - "obj.foo = 'bar';" - "obj[5] = 'baz';" - "objNoCheck.baz = 'quux'"); - const RecordExpectation expected_records2[] = { - { instance, "add", "foo", Handle<Value>() }, - { instance, "add", "5", Handle<Value>() }, - { obj_no_check, "add", "baz", Handle<Value>() }, - }; - EXPECT_RECORDS(CompileRun("records2"), expected_records2); + CHECK(CompileRun("Object.getNotifier(obj)")->IsNull()); } - const RecordExpectation expected_records[] = { - { obj_no_check, "add", "baz", Handle<Value>() } - }; - EXPECT_RECORDS(CompileRun("records"), expected_records); } -TEST(AccessCheckDisallowApiModifications) { +TEST(GetNotifierFromSameOrigin) { HandleScope scope(CcTest::isolate()); + Handle<Value> foo = String::NewFromUtf8(CcTest::isolate(), "foo"); LocalContext context(CcTest::isolate()); - Handle<Object> instance = CreateAccessCheckedObject( - CcTest::isolate(), BlockAccessKeys, IndexedAccessAlwaysAllowed); - CompileRun("var records = null;" - "var observer = function(r) { records = r };" - "Object.observe(obj, observer);"); + context->SetSecurityToken(foo); + CompileRun("var obj = {};"); + Handle<Value> instance = CompileRun("obj"); { LocalContext context2(CcTest::isolate()); + context2->SetSecurityToken(foo); context2->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), instance); - CompileRun("var records2 = null;" - "var observer2 = function(r) { records2 = r };" - "Object.observe(obj, observer2);"); - instance->Set(5, String::NewFromUtf8(CcTest::isolate(), "bar")); - instance->Set(String::NewFromUtf8(CcTest::isolate(), "foo"), - String::NewFromUtf8(CcTest::isolate(), "bar")); - CompileRun(""); // trigger delivery - const RecordExpectation expected_records2[] = { - { instance, "add", "5", Handle<Value>() }, - { instance, "add", "foo", Handle<Value>() } - }; - EXPECT_RECORDS(CompileRun("records2"), expected_records2); + CHECK(CompileRun("Object.getNotifier(obj)")->IsObject()); } - CHECK(CompileRun("records")->IsNull()); } 
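Taken together, the tests above pin down one rule: Object.observe change records and Object.getNotifier only work across contexts that share a security token; with mismatched tokens, records are silently dropped and getNotifier returns null. Below is a condensed, illustrative sketch of that rule using only the cctest helpers already seen in this file (LocalContext, CompileRun, v8_str); the test name is made up for the sketch.

TEST(ObserveSecurityTokenRuleSketch) {
  HandleScope scope(CcTest::isolate());
  LocalContext context(CcTest::isolate());
  context->SetSecurityToken(v8_str("token"));
  CompileRun("var obj = {};");
  Handle<Value> instance = CompileRun("obj");
  {
    // Same token: the second context counts as same-origin, so the notifier
    // is a real object and observers there would receive change records.
    LocalContext context2(CcTest::isolate());
    context2->SetSecurityToken(v8_str("token"));
    context2->Global()->Set(v8_str("obj"), instance);
    CHECK(CompileRun("Object.getNotifier(obj)")->IsObject());
  }
  {
    // No shared token: the access check fails and the notifier is null.
    LocalContext context3(CcTest::isolate());
    context3->Global()->Set(v8_str("obj"), instance);
    CHECK(CompileRun("Object.getNotifier(obj)")->IsNull());
  }
}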
-TEST(HiddenPropertiesLeakage) { +static int GetGlobalObjectsCount() { + CcTest::heap()->EnsureHeapIsIterable(); + int count = 0; + i::HeapIterator it(CcTest::heap()); + for (i::HeapObject* object = it.next(); object != NULL; object = it.next()) + if (object->IsJSGlobalObject()) count++; + return count; +} + + +static void CheckSurvivingGlobalObjectsCount(int expected) { + // We need to collect all garbage twice to be sure that everything + // has been collected. This is because inline caches are cleared in + // the first garbage collection but some of the maps have already + // been marked at that point. Therefore some of the maps are not + // collected until the second garbage collection. + CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags); + CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask); + int count = GetGlobalObjectsCount(); +#ifdef DEBUG + if (count != expected) CcTest::heap()->TracePathToGlobal(); +#endif + CHECK_EQ(expected, count); +} + + +TEST(DontLeakContextOnObserve) { HandleScope scope(CcTest::isolate()); + Handle<Value> foo = String::NewFromUtf8(CcTest::isolate(), "foo"); LocalContext context(CcTest::isolate()); - CompileRun("var obj = {};" - "var records = null;" - "var observer = function(r) { records = r };" - "Object.observe(obj, observer);"); - Handle<Value> obj = - context->Global()->Get(String::NewFromUtf8(CcTest::isolate(), "obj")); - Handle<Object>::Cast(obj) - ->SetHiddenValue(String::NewFromUtf8(CcTest::isolate(), "foo"), - Null(CcTest::isolate())); - CompileRun(""); // trigger delivery - CHECK(CompileRun("records")->IsNull()); + context->SetSecurityToken(foo); + CompileRun("var obj = {};"); + Handle<Value> object = CompileRun("obj"); + { + HandleScope scope(CcTest::isolate()); + LocalContext context2(CcTest::isolate()); + context2->SetSecurityToken(foo); + context2->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), + object); + CompileRun("function observer() {};" + "Object.observe(obj, observer, ['foo', 'bar', 'baz']);" + "Object.unobserve(obj, observer);"); + } + + v8::V8::ContextDisposedNotification(); + CheckSurvivingGlobalObjectsCount(1); +} + + +TEST(DontLeakContextOnGetNotifier) { + HandleScope scope(CcTest::isolate()); + Handle<Value> foo = String::NewFromUtf8(CcTest::isolate(), "foo"); + LocalContext context(CcTest::isolate()); + context->SetSecurityToken(foo); + CompileRun("var obj = {};"); + Handle<Value> object = CompileRun("obj"); + { + HandleScope scope(CcTest::isolate()); + LocalContext context2(CcTest::isolate()); + context2->SetSecurityToken(foo); + context2->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), + object); + CompileRun("Object.getNotifier(obj);"); + } + + v8::V8::ContextDisposedNotification(); + CheckSurvivingGlobalObjectsCount(1); +} + + +TEST(DontLeakContextOnNotifierPerformChange) { + HandleScope scope(CcTest::isolate()); + Handle<Value> foo = String::NewFromUtf8(CcTest::isolate(), "foo"); + LocalContext context(CcTest::isolate()); + context->SetSecurityToken(foo); + CompileRun("var obj = {};"); + Handle<Value> object = CompileRun("obj"); + Handle<Value> notifier = CompileRun("Object.getNotifier(obj)"); + { + HandleScope scope(CcTest::isolate()); + LocalContext context2(CcTest::isolate()); + context2->SetSecurityToken(foo); + context2->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "obj"), + object); + context2->Global()->Set(String::NewFromUtf8(CcTest::isolate(), "notifier"), + notifier); + CompileRun("var obj2 = {};" + "var notifier2 = Object.getNotifier(obj2);" + 
"notifier2.performChange.call(" + "notifier, 'foo', function(){})"); + } + + v8::V8::ContextDisposedNotification(); + CheckSurvivingGlobalObjectsCount(1); } diff --git a/deps/v8/test/cctest/test-ordered-hash-table.cc b/deps/v8/test/cctest/test-ordered-hash-table.cc new file mode 100644 index 000000000..48a457f5e --- /dev/null +++ b/deps/v8/test/cctest/test-ordered-hash-table.cc @@ -0,0 +1,244 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +#include <stdlib.h> + +#include "v8.h" + +#include "cctest.h" +#include "factory.h" + +namespace { + +using namespace v8::internal; + + +void CheckIterResultObject(Isolate* isolate, + Handle<JSObject> result, + Handle<Object> value, + bool done) { + Handle<Object> value_object = + Object::GetProperty(isolate, result, "value").ToHandleChecked(); + Handle<Object> done_object = + Object::GetProperty(isolate, result, "done").ToHandleChecked(); + + CHECK_EQ(*value_object, *value); + CHECK(done_object->IsBoolean()); + CHECK_EQ(done_object->BooleanValue(), done); +} + + +TEST(Set) { + i::FLAG_harmony_collections = true; + + LocalContext context; + Isolate* isolate = CcTest::i_isolate(); + Factory* factory = isolate->factory(); + HandleScope scope(isolate); + Handle<OrderedHashSet> ordered_set = factory->NewOrderedHashSet(); + CHECK_EQ(2, ordered_set->NumberOfBuckets()); + CHECK_EQ(0, ordered_set->NumberOfElements()); + CHECK_EQ(0, ordered_set->NumberOfDeletedElements()); + + Handle<JSSetIterator> value_iterator = + JSSetIterator::Create(ordered_set, JSSetIterator::kKindValues); + Handle<JSSetIterator> value_iterator_2 = + JSSetIterator::Create(ordered_set, JSSetIterator::kKindValues); + + Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); + Handle<JSObject> obj = factory->NewJSObjectFromMap(map); + CHECK(!ordered_set->Contains(obj)); + ordered_set = OrderedHashSet::Add(ordered_set, obj); + CHECK_EQ(1, ordered_set->NumberOfElements()); + CHECK(ordered_set->Contains(obj)); + ordered_set = OrderedHashSet::Remove(ordered_set, obj); + CHECK_EQ(0, ordered_set->NumberOfElements()); + CHECK(!ordered_set->Contains(obj)); + + // Test for collisions/chaining + Handle<JSObject> obj1 = factory->NewJSObjectFromMap(map); + ordered_set = OrderedHashSet::Add(ordered_set, obj1); + Handle<JSObject> obj2 = factory->NewJSObjectFromMap(map); + ordered_set = OrderedHashSet::Add(ordered_set, obj2); + Handle<JSObject> obj3 = factory->NewJSObjectFromMap(map); + ordered_set = OrderedHashSet::Add(ordered_set, obj3); + CHECK_EQ(3, ordered_set->NumberOfElements()); + CHECK(ordered_set->Contains(obj1)); + CHECK(ordered_set->Contains(obj2)); + CHECK(ordered_set->Contains(obj3)); + + // Test iteration + CheckIterResultObject( + isolate, JSSetIterator::Next(value_iterator), obj1, false); + CheckIterResultObject( + isolate, JSSetIterator::Next(value_iterator), obj2, false); + CheckIterResultObject( + isolate, JSSetIterator::Next(value_iterator), obj3, false); + CheckIterResultObject(isolate, + JSSetIterator::Next(value_iterator), + factory->undefined_value(), + true); + + // Test growth + ordered_set = OrderedHashSet::Add(ordered_set, obj); + Handle<JSObject> obj4 = factory->NewJSObjectFromMap(map); + ordered_set = OrderedHashSet::Add(ordered_set, obj4); + CHECK(ordered_set->Contains(obj)); + CHECK(ordered_set->Contains(obj1)); + CHECK(ordered_set->Contains(obj2)); + CHECK(ordered_set->Contains(obj3)); + CHECK(ordered_set->Contains(obj4)); + CHECK_EQ(5, ordered_set->NumberOfElements()); + CHECK_EQ(0, ordered_set->NumberOfDeletedElements()); + CHECK_EQ(4, ordered_set->NumberOfBuckets()); + + // Test iteration after growth + CheckIterResultObject( + isolate, JSSetIterator::Next(value_iterator_2), obj1, false); + CheckIterResultObject( + isolate, JSSetIterator::Next(value_iterator_2), obj2, false); + CheckIterResultObject( + isolate, JSSetIterator::Next(value_iterator_2), obj3, false); + CheckIterResultObject( + isolate, JSSetIterator::Next(value_iterator_2), obj, false); + CheckIterResultObject( + isolate, 
JSSetIterator::Next(value_iterator_2), obj4, false); + CheckIterResultObject(isolate, + JSSetIterator::Next(value_iterator_2), + factory->undefined_value(), + true); + + // Test shrinking + ordered_set = OrderedHashSet::Remove(ordered_set, obj); + ordered_set = OrderedHashSet::Remove(ordered_set, obj1); + ordered_set = OrderedHashSet::Remove(ordered_set, obj2); + ordered_set = OrderedHashSet::Remove(ordered_set, obj3); + CHECK_EQ(1, ordered_set->NumberOfElements()); + CHECK_EQ(2, ordered_set->NumberOfBuckets()); +} + + +TEST(Map) { + i::FLAG_harmony_collections = true; + + LocalContext context; + Isolate* isolate = CcTest::i_isolate(); + Factory* factory = isolate->factory(); + HandleScope scope(isolate); + Handle<OrderedHashMap> ordered_map = factory->NewOrderedHashMap(); + CHECK_EQ(2, ordered_map->NumberOfBuckets()); + CHECK_EQ(0, ordered_map->NumberOfElements()); + CHECK_EQ(0, ordered_map->NumberOfDeletedElements()); + + Handle<JSMapIterator> value_iterator = + JSMapIterator::Create(ordered_map, JSMapIterator::kKindValues); + Handle<JSMapIterator> key_iterator = + JSMapIterator::Create(ordered_map, JSMapIterator::kKindKeys); + + Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); + Handle<JSObject> obj = factory->NewJSObjectFromMap(map); + Handle<JSObject> val = factory->NewJSObjectFromMap(map); + CHECK(ordered_map->Lookup(obj)->IsTheHole()); + ordered_map = OrderedHashMap::Put(ordered_map, obj, val); + CHECK_EQ(1, ordered_map->NumberOfElements()); + CHECK(ordered_map->Lookup(obj)->SameValue(*val)); + ordered_map = OrderedHashMap::Put( + ordered_map, obj, factory->the_hole_value()); + CHECK_EQ(0, ordered_map->NumberOfElements()); + CHECK(ordered_map->Lookup(obj)->IsTheHole()); + + // Test for collisions/chaining + Handle<JSObject> obj1 = factory->NewJSObjectFromMap(map); + Handle<JSObject> obj2 = factory->NewJSObjectFromMap(map); + Handle<JSObject> obj3 = factory->NewJSObjectFromMap(map); + Handle<JSObject> val1 = factory->NewJSObjectFromMap(map); + Handle<JSObject> val2 = factory->NewJSObjectFromMap(map); + Handle<JSObject> val3 = factory->NewJSObjectFromMap(map); + ordered_map = OrderedHashMap::Put(ordered_map, obj1, val1); + ordered_map = OrderedHashMap::Put(ordered_map, obj2, val2); + ordered_map = OrderedHashMap::Put(ordered_map, obj3, val3); + CHECK_EQ(3, ordered_map->NumberOfElements()); + CHECK(ordered_map->Lookup(obj1)->SameValue(*val1)); + CHECK(ordered_map->Lookup(obj2)->SameValue(*val2)); + CHECK(ordered_map->Lookup(obj3)->SameValue(*val3)); + + // Test iteration + CheckIterResultObject( + isolate, JSMapIterator::Next(value_iterator), val1, false); + CheckIterResultObject( + isolate, JSMapIterator::Next(value_iterator), val2, false); + CheckIterResultObject( + isolate, JSMapIterator::Next(value_iterator), val3, false); + CheckIterResultObject(isolate, + JSMapIterator::Next(value_iterator), + factory->undefined_value(), + true); + + // Test growth + ordered_map = OrderedHashMap::Put(ordered_map, obj, val); + Handle<JSObject> obj4 = factory->NewJSObjectFromMap(map); + Handle<JSObject> val4 = factory->NewJSObjectFromMap(map); + ordered_map = OrderedHashMap::Put(ordered_map, obj4, val4); + CHECK(ordered_map->Lookup(obj)->SameValue(*val)); + CHECK(ordered_map->Lookup(obj1)->SameValue(*val1)); + CHECK(ordered_map->Lookup(obj2)->SameValue(*val2)); + CHECK(ordered_map->Lookup(obj3)->SameValue(*val3)); + CHECK(ordered_map->Lookup(obj4)->SameValue(*val4)); + CHECK_EQ(5, ordered_map->NumberOfElements()); + CHECK_EQ(4, ordered_map->NumberOfBuckets()); + + // Test 
iteration after growth + CheckIterResultObject( + isolate, JSMapIterator::Next(key_iterator), obj1, false); + CheckIterResultObject( + isolate, JSMapIterator::Next(key_iterator), obj2, false); + CheckIterResultObject( + isolate, JSMapIterator::Next(key_iterator), obj3, false); + CheckIterResultObject( + isolate, JSMapIterator::Next(key_iterator), obj, false); + CheckIterResultObject( + isolate, JSMapIterator::Next(key_iterator), obj4, false); + CheckIterResultObject(isolate, + JSMapIterator::Next(key_iterator), + factory->undefined_value(), + true); + + // Test shrinking + ordered_map = OrderedHashMap::Put( + ordered_map, obj, factory->the_hole_value()); + ordered_map = OrderedHashMap::Put( + ordered_map, obj1, factory->the_hole_value()); + ordered_map = OrderedHashMap::Put( + ordered_map, obj2, factory->the_hole_value()); + ordered_map = OrderedHashMap::Put( + ordered_map, obj3, factory->the_hole_value()); + CHECK_EQ(1, ordered_map->NumberOfElements()); + CHECK_EQ(2, ordered_map->NumberOfBuckets()); +} + + +} diff --git a/deps/v8/test/cctest/test-parsing.cc b/deps/v8/test/cctest/test-parsing.cc index 2746388bb..58734d054 100644 --- a/deps/v8/test/cctest/test-parsing.cc +++ b/deps/v8/test/cctest/test-parsing.cc @@ -144,24 +144,36 @@ TEST(ScanHTMLEndComments) { int marker; CcTest::i_isolate()->stack_guard()->SetStackLimit( reinterpret_cast<uintptr_t>(&marker) - 128 * 1024); - + uintptr_t stack_limit = CcTest::i_isolate()->stack_guard()->real_climit(); for (int i = 0; tests[i]; i++) { - v8::Handle<v8::String> source = v8::String::NewFromUtf8( - isolate, tests[i], v8::String::kNormalString, i::StrLength(tests[i])); - v8::ScriptData* data = v8::ScriptData::PreCompile(source); - CHECK(data != NULL && !data->HasError()); - delete data; + const i::byte* source = + reinterpret_cast<const i::byte*>(tests[i]); + i::Utf8ToUtf16CharacterStream stream(source, i::StrLength(tests[i])); + i::CompleteParserRecorder log; + i::Scanner scanner(CcTest::i_isolate()->unicode_cache()); + scanner.Initialize(&stream); + i::PreParser preparser(&scanner, &log, stack_limit); + preparser.set_allow_lazy(true); + i::PreParser::PreParseResult result = preparser.PreParseProgram(); + CHECK_EQ(i::PreParser::kPreParseSuccess, result); + i::ScriptData data(log.ExtractData()); + CHECK(!data.has_error()); } for (int i = 0; fail_tests[i]; i++) { - v8::Handle<v8::String> source = - v8::String::NewFromUtf8(isolate, - fail_tests[i], - v8::String::kNormalString, - i::StrLength(fail_tests[i])); - v8::ScriptData* data = v8::ScriptData::PreCompile(source); - CHECK(data == NULL || data->HasError()); - delete data; + const i::byte* source = + reinterpret_cast<const i::byte*>(fail_tests[i]); + i::Utf8ToUtf16CharacterStream stream(source, i::StrLength(fail_tests[i])); + i::CompleteParserRecorder log; + i::Scanner scanner(CcTest::i_isolate()->unicode_cache()); + scanner.Initialize(&stream); + i::PreParser preparser(&scanner, &log, stack_limit); + preparser.set_allow_lazy(true); + i::PreParser::PreParseResult result = preparser.PreParseProgram(); + // Even in the case of a syntax error, kPreParseSuccess is returned. 
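The same preparse boilerplate recurs in this file wherever the old v8::ScriptData::PreCompile call was removed (here, and again in RegressChromium62639 and Regress928 below): build a Utf8ToUtf16CharacterStream, drive i::PreParser with a Scanner and a CompleteParserRecorder, then inspect the extracted i::ScriptData. A consolidated sketch of that pattern, assembled only from calls in the surrounding hunks; the helper name is made up for illustration.

// Runs the preparser over a C string and reports whether it recorded a
// syntax error. Preparsing itself still returns kPreParseSuccess even for
// erroneous programs; the error lives in the extracted ScriptData.
static bool PreParseHasError(const char* program) {
  i::Utf8ToUtf16CharacterStream stream(
      reinterpret_cast<const i::byte*>(program),
      static_cast<unsigned>(strlen(program)));
  i::CompleteParserRecorder log;
  i::Scanner scanner(CcTest::i_isolate()->unicode_cache());
  scanner.Initialize(&stream);
  i::PreParser preparser(&scanner, &log,
                         CcTest::i_isolate()->stack_guard()->real_climit());
  preparser.set_allow_lazy(true);
  i::PreParser::PreParseResult result = preparser.PreParseProgram();
  CHECK_EQ(i::PreParser::kPreParseSuccess, result);
  i::ScriptData data(log.ExtractData());
  return data.has_error();
}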
+ CHECK_EQ(i::PreParser::kPreParseSuccess, result); + i::ScriptData data(log.ExtractData()); + CHECK(data.has_error()); } } @@ -180,7 +192,7 @@ class ScriptResource : public v8::String::ExternalAsciiStringResource { }; -TEST(Preparsing) { +TEST(UsingCachedData) { v8::Isolate* isolate = CcTest::isolate(); v8::HandleScope handles(isolate); v8::Local<v8::Context> context = v8::Context::New(isolate); @@ -203,60 +215,22 @@ TEST(Preparsing) { "var y = { get getter() { return 42; }, " " set setter(v) { this.value = v; }};"; int source_length = i::StrLength(source); - const char* error_source = "var x = y z;"; - int error_source_length = i::StrLength(error_source); - v8::ScriptData* preparse = v8::ScriptData::PreCompile(v8::String::NewFromUtf8( - isolate, source, v8::String::kNormalString, source_length)); - CHECK(!preparse->HasError()); - bool lazy_flag = i::FLAG_lazy; - { - i::FLAG_lazy = true; - ScriptResource* resource = new ScriptResource(source, source_length); - v8::ScriptCompiler::Source script_source( - v8::String::NewExternal(isolate, resource), - new v8::ScriptCompiler::CachedData( - reinterpret_cast<const uint8_t*>(preparse->Data()), - preparse->Length())); - v8::ScriptCompiler::Compile(isolate, - &script_source); - } + // ScriptResource will be deleted when the corresponding String is GCd. + v8::ScriptCompiler::Source script_source(v8::String::NewExternal( + isolate, new ScriptResource(source, source_length))); + i::FLAG_min_preparse_length = 0; + v8::ScriptCompiler::Compile(isolate, &script_source, + v8::ScriptCompiler::kProduceDataToCache); + CHECK(script_source.GetCachedData()); - { - i::FLAG_lazy = false; - - ScriptResource* resource = new ScriptResource(source, source_length); - v8::ScriptCompiler::Source script_source( - v8::String::NewExternal(isolate, resource), - new v8::ScriptCompiler::CachedData( - reinterpret_cast<const uint8_t*>(preparse->Data()), - preparse->Length())); - v8::ScriptCompiler::CompileUnbound(isolate, &script_source); - } - delete preparse; + // Compile the script again, using the cached data. + bool lazy_flag = i::FLAG_lazy; + i::FLAG_lazy = true; + v8::ScriptCompiler::Compile(isolate, &script_source); + i::FLAG_lazy = false; + v8::ScriptCompiler::CompileUnbound(isolate, &script_source); i::FLAG_lazy = lazy_flag; - - // Syntax error. - v8::ScriptData* error_preparse = v8::ScriptData::PreCompile( - v8::String::NewFromUtf8(isolate, - error_source, - v8::String::kNormalString, - error_source_length)); - CHECK(error_preparse->HasError()); - i::ScriptDataImpl *pre_impl = - reinterpret_cast<i::ScriptDataImpl*>(error_preparse); - i::Scanner::Location error_location = - pre_impl->MessageLocation(); - // Error is at "z" in source, location 10..11. - CHECK_EQ(10, error_location.beg_pos); - CHECK_EQ(11, error_location.end_pos); - // Should not crash. - const char* message = pre_impl->BuildMessage(); - i::Vector<const char*> args = pre_impl->BuildArgs(); - CHECK_GT(strlen(message), 0); - args.Dispose(); - i::DeleteArray(message); - delete error_preparse; } @@ -303,56 +277,6 @@ TEST(PreparseFunctionDataIsUsed) { } -TEST(PreparseSymbolDataIsUsed) { - // This tests that we actually do use the symbol data generated by the - // preparser. - - // Only do one compilation pass in this test (otherwise we will parse the - // source code again without preparse data and it will fail). - i::FLAG_crankshaft = false; - - // Make preparsing work for short scripts. 
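For embedders, the replacement for the deleted v8::ScriptData::PreCompile API is the two-step ScriptCompiler flow that UsingCachedData above now exercises: compile once with kProduceDataToCache, read the CachedData back off the Source, and feed that data to a fresh Source for later compiles. A rough sketch of the round trip, assuming an entered context and using only calls that appear in these tests (v8_str is the cctest string helper; the function name is illustrative):

void CompileTwiceWithCachedData(v8::Isolate* isolate, const char* code) {
  // First pass: ask the compiler to produce cacheable preparse data.
  v8::ScriptCompiler::Source producing_source(v8_str(code));
  v8::ScriptCompiler::Compile(isolate, &producing_source,
                              v8::ScriptCompiler::kProduceDataToCache);
  const v8::ScriptCompiler::CachedData* cached_data =
      producing_source.GetCachedData();
  CHECK(cached_data->data != NULL);
  CHECK_GT(cached_data->length, 0);

  // Second pass: hand the cached bytes to a new Source so a later compile
  // of the same code can reuse them instead of preparsing again.
  v8::ScriptCompiler::Source consuming_source(
      v8_str(code),
      new v8::ScriptCompiler::CachedData(cached_data->data,
                                         cached_data->length));
  v8::ScriptCompiler::Compile(isolate, &consuming_source);
}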
- i::FLAG_min_preparse_length = 0; - - v8::Isolate* isolate = CcTest::isolate(); - v8::HandleScope handles(isolate); - v8::Local<v8::Context> context = v8::Context::New(isolate); - v8::Context::Scope context_scope(context); - int marker; - CcTest::i_isolate()->stack_guard()->SetStackLimit( - reinterpret_cast<uintptr_t>(&marker) - 128 * 1024); - - // Note that the ( before function makes the function not lazily compiled. - const char* good_code = - "(function weird() { var foo = 26; return foo; })()"; - - // Insert an undefined identifier. If the preparser data is used, the symbol - // stream is used instead, and this identifier resolves to "foo". - const char* bad_code = - "(function weird() { var foo = 26; return wut; })()"; - - v8::ScriptCompiler::Source good_source(v8_str(good_code)); - v8::ScriptCompiler::Compile(isolate, &good_source, - v8::ScriptCompiler::kProduceDataToCache); - - const v8::ScriptCompiler::CachedData* cached_data = - good_source.GetCachedData(); - CHECK(cached_data->data != NULL); - CHECK_GT(cached_data->length, 0); - - // Now compile the erroneous code with the good preparse data. If the preparse - // data is used, we will see a second occurrence of "foo" instead of the - // unknown "wut". - v8::ScriptCompiler::Source bad_source( - v8_str(bad_code), new v8::ScriptCompiler::CachedData( - cached_data->data, cached_data->length)); - v8::Local<v8::Value> result = - v8::ScriptCompiler::Compile(isolate, &bad_source)->Run(); - CHECK(result->IsInt32()); - CHECK_EQ(26, result->Int32Value()); -} - - TEST(StandAlonePreParser) { v8::V8::Initialize(); @@ -384,7 +308,7 @@ TEST(StandAlonePreParser) { preparser.set_allow_natives_syntax(true); i::PreParser::PreParseResult result = preparser.PreParseProgram(); CHECK_EQ(i::PreParser::kPreParseSuccess, result); - i::ScriptDataImpl data(log.ExtractData()); + i::ScriptData data(log.ExtractData()); CHECK(!data.has_error()); } } @@ -418,7 +342,7 @@ TEST(StandAlonePreParserNoNatives) { preparser.set_allow_lazy(true); i::PreParser::PreParseResult result = preparser.PreParseProgram(); CHECK_EQ(i::PreParser::kPreParseSuccess, result); - i::ScriptDataImpl data(log.ExtractData()); + i::ScriptData data(log.ExtractData()); // Data contains syntax error. CHECK(data.has_error()); } @@ -461,62 +385,6 @@ TEST(PreparsingObjectLiterals) { } } -namespace v8 { -namespace internal { - -struct CompleteParserRecorderFriend { - static void FakeWritingSymbolIdInPreParseData(CompleteParserRecorder* log, - int number) { - log->WriteNumber(number); - if (log->symbol_id_ < number + 1) { - log->symbol_id_ = number + 1; - } - } - static int symbol_position(CompleteParserRecorder* log) { - return log->symbol_store_.size(); - } - static int symbol_ids(CompleteParserRecorder* log) { - return log->symbol_id_; - } - static int function_position(CompleteParserRecorder* log) { - return log->function_store_.size(); - } -}; - -} -} - - -TEST(StoringNumbersInPreParseData) { - // Symbol IDs are split into chunks of 7 bits for storing. This is a - // regression test for a bug where a symbol id was incorrectly stored if some - // of the chunks in the middle were all zeros. 
- typedef i::CompleteParserRecorderFriend F; - i::CompleteParserRecorder log; - for (int i = 0; i < 18; ++i) { - F::FakeWritingSymbolIdInPreParseData(&log, 1 << i); - } - for (int i = 1; i < 18; ++i) { - F::FakeWritingSymbolIdInPreParseData(&log, (1 << i) + 1); - } - for (int i = 6; i < 18; ++i) { - F::FakeWritingSymbolIdInPreParseData(&log, (3 << i) + (5 << (i - 6))); - } - i::Vector<unsigned> store = log.ExtractData(); - i::ScriptDataImpl script_data(store); - script_data.Initialize(); - // Check that we get the same symbols back. - for (int i = 0; i < 18; ++i) { - CHECK_EQ(1 << i, script_data.GetSymbolIdentifier()); - } - for (int i = 1; i < 18; ++i) { - CHECK_EQ((1 << i) + 1, script_data.GetSymbolIdentifier()); - } - for (int i = 6; i < 18; ++i) { - CHECK_EQ((3 << i) + (5 << (i - 6)), script_data.GetSymbolIdentifier()); - } -} - TEST(RegressChromium62639) { v8::V8::Initialize(); @@ -536,9 +404,17 @@ TEST(RegressChromium62639) { i::Utf8ToUtf16CharacterStream stream( reinterpret_cast<const i::byte*>(program), static_cast<unsigned>(strlen(program))); - i::ScriptDataImpl* data = i::PreParserApi::PreParse(isolate, &stream); - CHECK(data->HasError()); - delete data; + i::CompleteParserRecorder log; + i::Scanner scanner(CcTest::i_isolate()->unicode_cache()); + scanner.Initialize(&stream); + i::PreParser preparser(&scanner, &log, + CcTest::i_isolate()->stack_guard()->real_climit()); + preparser.set_allow_lazy(true); + i::PreParser::PreParseResult result = preparser.PreParseProgram(); + // Even in the case of a syntax error, kPreParseSuccess is returned. + CHECK_EQ(i::PreParser::kPreParseSuccess, result); + i::ScriptData data(log.ExtractData()); + CHECK(data.has_error()); } @@ -560,19 +436,25 @@ TEST(Regress928) { "var bar = function () { /* second */ }"; v8::HandleScope handles(CcTest::isolate()); - i::Handle<i::String> source( - factory->NewStringFromAscii(i::CStrVector(program))); + i::Handle<i::String> source = factory->NewStringFromAsciiChecked(program); i::GenericStringUtf16CharacterStream stream(source, 0, source->length()); - i::ScriptDataImpl* data = i::PreParserApi::PreParse(isolate, &stream); - CHECK(!data->HasError()); - - data->Initialize(); + i::CompleteParserRecorder log; + i::Scanner scanner(CcTest::i_isolate()->unicode_cache()); + scanner.Initialize(&stream); + i::PreParser preparser(&scanner, &log, + CcTest::i_isolate()->stack_guard()->real_climit()); + preparser.set_allow_lazy(true); + i::PreParser::PreParseResult result = preparser.PreParseProgram(); + CHECK_EQ(i::PreParser::kPreParseSuccess, result); + i::ScriptData data(log.ExtractData()); + CHECK(!data.has_error()); + data.Initialize(); int first_function = static_cast<int>(strstr(program, "function") - program); int first_lbrace = first_function + i::StrLength("function () "); CHECK_EQ('{', program[first_lbrace]); - i::FunctionEntry entry1 = data->GetFunctionEntry(first_lbrace); + i::FunctionEntry entry1 = data.GetFunctionEntry(first_lbrace); CHECK(!entry1.is_valid()); int second_function = @@ -580,10 +462,9 @@ TEST(Regress928) { int second_lbrace = second_function + i::StrLength("function () "); CHECK_EQ('{', program[second_lbrace]); - i::FunctionEntry entry2 = data->GetFunctionEntry(second_lbrace); + i::FunctionEntry entry2 = data.GetFunctionEntry(second_lbrace); CHECK(entry2.is_valid()); CHECK_EQ('}', program[entry2.end_pos() - 1]); - delete data; } @@ -651,11 +532,11 @@ void TestCharacterStream(const char* ascii_source, uc16_buffer[i] = static_cast<i::uc16>(ascii_source[i]); } i::Vector<const char> 
ascii_vector(ascii_source, static_cast<int>(length)); - i::Handle<i::String> ascii_string( - factory->NewStringFromAscii(ascii_vector)); + i::Handle<i::String> ascii_string = + factory->NewStringFromAscii(ascii_vector).ToHandleChecked(); TestExternalResource resource(uc16_buffer.get(), length); i::Handle<i::String> uc16_string( - factory->NewExternalStringFromTwoByte(&resource)); + factory->NewExternalStringFromTwoByte(&resource).ToHandleChecked()); i::ExternalTwoByteStringUtf16CharacterStream uc16_stream( i::Handle<i::ExternalTwoByteString>::cast(uc16_string), start, end); @@ -1230,8 +1111,8 @@ TEST(ScopePositions) { source_data[i].outer_suffix); // Parse program source. - i::Handle<i::String> source( - factory->NewStringFromUtf8(i::CStrVector(program.start()))); + i::Handle<i::String> source = factory->NewStringFromUtf8( + i::CStrVector(program.start())).ToHandleChecked(); CHECK_EQ(source->length(), kProgramSize); i::Handle<i::Script> script = factory->NewScript(source); i::CompilationInfoWithZone info(script); @@ -1260,7 +1141,7 @@ TEST(ScopePositions) { } -i::Handle<i::String> FormatMessage(i::ScriptDataImpl* data) { +i::Handle<i::String> FormatMessage(i::ScriptData* data) { i::Isolate* isolate = CcTest::i_isolate(); i::Factory* factory = isolate->factory(); const char* message = data->BuildMessage(); @@ -1272,16 +1153,14 @@ i::Handle<i::String> FormatMessage(i::ScriptDataImpl* data) { i::JSArray::SetElement( args_array, i, v8::Utils::OpenHandle(*v8::String::NewFromUtf8( CcTest::isolate(), args[i])), - NONE, i::SLOPPY); + NONE, i::SLOPPY).Check(); } i::Handle<i::JSObject> builtins(isolate->js_builtins_object()); - i::Handle<i::Object> format_fun = - i::GetProperty(builtins, "FormatMessage"); + i::Handle<i::Object> format_fun = i::Object::GetProperty( + isolate, builtins, "FormatMessage").ToHandleChecked(); i::Handle<i::Object> arg_handles[] = { format, args_array }; - bool has_exception = false; i::Handle<i::Object> result = i::Execution::Call( - isolate, format_fun, builtins, 2, arg_handles, &has_exception); - CHECK(!has_exception); + isolate, format_fun, builtins, 2, arg_handles).ToHandleChecked(); CHECK(result->IsString()); for (int i = 0; i < args.length(); i++) { i::DeleteArray(args[i]); @@ -1342,7 +1221,7 @@ void TestParserSyncWithFlags(i::Handle<i::String> source, i::PreParser::PreParseResult result = preparser.PreParseProgram(); CHECK_EQ(i::PreParser::kPreParseSuccess, result); } - i::ScriptDataImpl data(log.ExtractData()); + i::ScriptData data(log.ExtractData()); // Parse the data i::FunctionLiteral* function; @@ -1360,12 +1239,11 @@ void TestParserSyncWithFlags(i::Handle<i::String> source, if (function == NULL) { // Extract exception from the parser. CHECK(isolate->has_pending_exception()); - i::MaybeObject* maybe_object = isolate->pending_exception(); - i::JSObject* exception = NULL; - CHECK(maybe_object->To(&exception)); - i::Handle<i::JSObject> exception_handle(exception); + i::Handle<i::JSObject> exception_handle( + i::JSObject::cast(isolate->pending_exception())); i::Handle<i::String> message_string = - i::Handle<i::String>::cast(i::GetProperty(exception_handle, "message")); + i::Handle<i::String>::cast(i::Object::GetProperty( + isolate, exception_handle, "message").ToHandleChecked()); if (result == kSuccess) { i::OS::Print( @@ -1390,7 +1268,7 @@ void TestParserSyncWithFlags(i::Handle<i::String> source, } // Check that preparser and parser produce the same error. 
i::Handle<i::String> preparser_message = FormatMessage(&data); - if (!message_string->Equals(*preparser_message)) { + if (!i::String::Equals(message_string, preparser_message)) { i::OS::Print( "Expected parser and preparser to produce the same error on:\n" "\t%s\n" @@ -1427,7 +1305,7 @@ void TestParserSync(const char* source, size_t flag_list_length, ParserSyncTestResult result = kSuccessOrError) { i::Handle<i::String> str = - CcTest::i_isolate()->factory()->NewStringFromAscii(i::CStrVector(source)); + CcTest::i_isolate()->factory()->NewStringFromAsciiChecked(source); for (int bits = 0; bits < (1 << flag_list_length); bits++) { i::EnumSet<ParserFlag> flags; for (size_t flag_index = 0; flag_index < flag_list_length; flag_index++) { @@ -1480,9 +1358,10 @@ TEST(ParserSync) { "break", "break label", "break\nlabel", - "return", - "return 12", - "return\n12", + // TODO(marja): activate once parsing 'return' is merged into ParserBase. + // "return", + // "return 12", + // "return\n12", "with ({}) ;", "with ({}) {}", "with ({}) 12", @@ -1555,10 +1434,9 @@ TEST(ParserSync) { } -TEST(PreparserStrictOctal) { +TEST(StrictOctal) { // Test that syntax error caused by octal literal is reported correctly as // such (issue 2220). - v8::internal::FLAG_min_preparse_length = 1; // Force preparsing. v8::V8::Initialize(); v8::HandleScope scope(CcTest::isolate()); v8::Context::Scope context_scope( @@ -2085,77 +1963,55 @@ TEST(NoErrorsIdentifierNames) { TEST(DontRegressPreParserDataSizes) { - // These tests make sure that PreParser doesn't start producing less data. - + // These tests make sure that Parser doesn't start producing less "preparse + // data" (data which the embedder can cache). v8::V8::Initialize(); + v8::Isolate* isolate = CcTest::isolate(); + v8::HandleScope handles(isolate); + int marker; CcTest::i_isolate()->stack_guard()->SetStackLimit( reinterpret_cast<uintptr_t>(&marker) - 128 * 1024); struct TestCase { const char* program; - int symbols; int functions; } test_cases[] = { - // Labels and variables are recorded as symbols. - {"{label: 42}", 1, 0}, {"{label: 42; label2: 43}", 2, 0}, - {"var x = 42;", 1, 0}, {"var x = 42, y = 43;", 2, 0}, - {"var x = {y: 1};", 2, 0}, - {"var x = {}; x.y = 1", 2, 0}, - // "get" is recorded as a symbol too. - {"var x = {get foo(){} };", 3, 1}, - // When keywords are used as identifiers, they're logged as symbols, too: - {"var x = {if: 1};", 2, 0}, - {"var x = {}; x.if = 1", 2, 0}, - {"var x = {get if(){} };", 3, 1}, - // Functions - {"function foo() {}", 1, 1}, {"function foo() {} function bar() {}", 2, 2}, - // Labels, variables and functions insize lazy functions are not recorded. - {"function lazy() { var a, b, c; }", 1, 1}, - {"function lazy() { a: 1; b: 2; c: 3; }", 1, 1}, - {"function lazy() { function a() {} function b() {} function c() {} }", 1, - 1}, - {NULL, 0, 0} - }; - // Each function adds 5 elements to the preparse function data. - const int kDataPerFunction = 5; - - typedef i::CompleteParserRecorderFriend F; - uintptr_t stack_limit = CcTest::i_isolate()->stack_guard()->real_climit(); + // No functions. + {"var x = 42;", 0}, + // Functions. + {"function foo() {}", 1}, {"function foo() {} function bar() {}", 2}, + // Getter / setter functions are recorded as functions if they're on the top + // level. + {"var x = {get foo(){} };", 1}, + // Functions insize lazy functions are not recorded. 
+ {"function lazy() { function a() {} function b() {} function c() {} }", 1}, + {"function lazy() { var x = {get foo(){} } }", 1}, + {NULL, 0} + }; + for (int i = 0; test_cases[i].program; i++) { const char* program = test_cases[i].program; - i::Utf8ToUtf16CharacterStream stream( - reinterpret_cast<const i::byte*>(program), - static_cast<unsigned>(strlen(program))); - i::CompleteParserRecorder log; - i::Scanner scanner(CcTest::i_isolate()->unicode_cache()); - scanner.Initialize(&stream); + i::Factory* factory = CcTest::i_isolate()->factory(); + i::Handle<i::String> source = + factory->NewStringFromUtf8(i::CStrVector(program)).ToHandleChecked(); + i::Handle<i::Script> script = factory->NewScript(source); + i::CompilationInfoWithZone info(script); + i::ScriptData* data = NULL; + info.SetCachedData(&data, i::PRODUCE_CACHED_DATA); + i::Parser::Parse(&info, true); + CHECK(data); + CHECK(!data->HasError()); - i::PreParser preparser(&scanner, &log, stack_limit); - preparser.set_allow_lazy(true); - preparser.set_allow_natives_syntax(true); - i::PreParser::PreParseResult result = preparser.PreParseProgram(); - CHECK_EQ(i::PreParser::kPreParseSuccess, result); - if (F::symbol_ids(&log) != test_cases[i].symbols) { - i::OS::Print( - "Expected preparse data for program:\n" - "\t%s\n" - "to contain %d symbols, however, received %d symbols.\n", - program, test_cases[i].symbols, F::symbol_ids(&log)); - CHECK(false); - } - if (F::function_position(&log) != - test_cases[i].functions * kDataPerFunction) { + if (data->function_count() != test_cases[i].functions) { i::OS::Print( "Expected preparse data for program:\n" "\t%s\n" "to contain %d functions, however, received %d functions.\n", program, test_cases[i].functions, - F::function_position(&log) / kDataPerFunction); + data->function_count()); CHECK(false); } - i::ScriptDataImpl data(log.ExtractData()); - CHECK(!data.has_error()); } } @@ -2557,13 +2413,22 @@ TEST(InvalidLeftHandSide) { "this[foo]", "new foo()[bar]", "new foo().bar", + "foo()", + "foo(bar)", + "foo[bar]()", + "foo.bar()", + "this()", + "this.foo()", + "this[foo].bar()", + "this.foo[foo].bar(this)(bar)[foo]()", NULL }; // Bad left hand sides for assigment or prefix / postfix operations. 
const char* bad_statement_data_common[] = { "2", - "foo()", + "new foo", + "new foo()", "null", "if", // Unexpected token "{x: 1}", // Unexpected token diff --git a/deps/v8/test/cctest/test-platform.cc b/deps/v8/test/cctest/test-platform.cc index b9f8bafe4..6b28b1895 100644 --- a/deps/v8/test/cctest/test-platform.cc +++ b/deps/v8/test/cctest/test-platform.cc @@ -59,7 +59,7 @@ using namespace ::v8::internal; do { \ ASM("mov x16, sp; str x16, %0" : "=g" (sp_addr)); \ } while (0) -#elif defined(__MIPSEL__) +#elif defined(__MIPSEB__) || defined(__MIPSEL__) #define GET_STACK_POINTER() \ static int sp_addr = 0; \ do { \ diff --git a/deps/v8/test/cctest/test-regexp.cc b/deps/v8/test/cctest/test-regexp.cc index 712fec056..10b227c8e 100644 --- a/deps/v8/test/cctest/test-regexp.cc +++ b/deps/v8/test/cctest/test-regexp.cc @@ -506,9 +506,9 @@ static RegExpNode* Compile(const char* input, &compile_data, zone)) return NULL; Handle<String> pattern = isolate->factory()-> - NewStringFromUtf8(CStrVector(input)); + NewStringFromUtf8(CStrVector(input)).ToHandleChecked(); Handle<String> sample_subject = - isolate->factory()->NewStringFromUtf8(CStrVector("")); + isolate->factory()->NewStringFromUtf8(CStrVector("")).ToHandleChecked(); RegExpEngine::Compile(&compile_data, false, false, @@ -745,12 +745,12 @@ TEST(MacroAssemblerNativeSuccess) { m.Succeed(); - Handle<String> source = factory->NewStringFromAscii(CStrVector("")); + Handle<String> source = factory->NewStringFromStaticAscii(""); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); int captures[4] = {42, 37, 87, 117}; - Handle<String> input = factory->NewStringFromAscii(CStrVector("foofoo")); + Handle<String> input = factory->NewStringFromStaticAscii("foofoo"); Handle<SeqOneByteString> seq_input = Handle<SeqOneByteString>::cast(input); const byte* start_adr = reinterpret_cast<const byte*>(seq_input->GetCharsAddress()); @@ -799,12 +799,12 @@ TEST(MacroAssemblerNativeSimple) { m.Bind(&fail); m.Fail(); - Handle<String> source = factory->NewStringFromAscii(CStrVector("^foo")); + Handle<String> source = factory->NewStringFromStaticAscii("^foo"); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); int captures[4] = {42, 37, 87, 117}; - Handle<String> input = factory->NewStringFromAscii(CStrVector("foofoo")); + Handle<String> input = factory->NewStringFromStaticAscii("foofoo"); Handle<SeqOneByteString> seq_input = Handle<SeqOneByteString>::cast(input); Address start_adr = seq_input->GetCharsAddress(); @@ -822,7 +822,7 @@ TEST(MacroAssemblerNativeSimple) { CHECK_EQ(-1, captures[2]); CHECK_EQ(-1, captures[3]); - input = factory->NewStringFromAscii(CStrVector("barbarbar")); + input = factory->NewStringFromStaticAscii("barbarbar"); seq_input = Handle<SeqOneByteString>::cast(input); start_adr = seq_input->GetCharsAddress(); @@ -865,15 +865,15 @@ TEST(MacroAssemblerNativeSimpleUC16) { m.Bind(&fail); m.Fail(); - Handle<String> source = factory->NewStringFromAscii(CStrVector("^foo")); + Handle<String> source = factory->NewStringFromStaticAscii("^foo"); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); int captures[4] = {42, 37, 87, 117}; const uc16 input_data[6] = {'f', 'o', 'o', 'f', 'o', static_cast<uc16>(0x2603)}; - Handle<String> input = - factory->NewStringFromTwoByte(Vector<const uc16>(input_data, 6)); + Handle<String> input = factory->NewStringFromTwoByte( + Vector<const uc16>(input_data, 6)).ToHandleChecked(); 
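The string conversions in the two added lines just above repeat throughout the rest of this hunk: Factory constructors that can fail now return a MaybeHandle, so call sites either switch to NewStringFromStaticAscii for literal data or unwrap the result with ToHandleChecked(). A small sketch of the two call shapes, assuming the same internal Factory and using-directives as this test file (the helper name is made up):

static void NewStringCallShapes(Factory* factory) {
  // Literal ASCII data: NewStringFromStaticAscii still hands back a Handle.
  Handle<String> literal = factory->NewStringFromStaticAscii("foofoo");
  CHECK(!literal.is_null());

  // Anything that can fail to allocate comes back as a MaybeHandle;
  // ToHandleChecked() asserts success and unwraps the Handle<String>.
  const uc16 data[] = {'f', 'o', 'o'};
  Handle<String> two_byte = factory->NewStringFromTwoByte(
      Vector<const uc16>(data, 3)).ToHandleChecked();
  CHECK_EQ(3, two_byte->length());
}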
Handle<SeqTwoByteString> seq_input = Handle<SeqTwoByteString>::cast(input); Address start_adr = seq_input->GetCharsAddress(); @@ -893,7 +893,8 @@ TEST(MacroAssemblerNativeSimpleUC16) { const uc16 input_data2[9] = {'b', 'a', 'r', 'b', 'a', 'r', 'b', 'a', static_cast<uc16>(0x2603)}; - input = factory->NewStringFromTwoByte(Vector<const uc16>(input_data2, 9)); + input = factory->NewStringFromTwoByte( + Vector<const uc16>(input_data2, 9)).ToHandleChecked(); seq_input = Handle<SeqTwoByteString>::cast(input); start_adr = seq_input->GetCharsAddress(); @@ -928,11 +929,11 @@ TEST(MacroAssemblerNativeBacktrack) { m.Bind(&backtrack); m.Fail(); - Handle<String> source = factory->NewStringFromAscii(CStrVector("..........")); + Handle<String> source = factory->NewStringFromStaticAscii(".........."); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); - Handle<String> input = factory->NewStringFromAscii(CStrVector("foofoo")); + Handle<String> input = factory->NewStringFromStaticAscii("foofoo"); Handle<SeqOneByteString> seq_input = Handle<SeqOneByteString>::cast(input); Address start_adr = seq_input->GetCharsAddress(); @@ -972,11 +973,11 @@ TEST(MacroAssemblerNativeBackReferenceASCII) { m.Bind(&missing_match); m.Fail(); - Handle<String> source = factory->NewStringFromAscii(CStrVector("^(..)..\1")); + Handle<String> source = factory->NewStringFromStaticAscii("^(..)..\1"); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); - Handle<String> input = factory->NewStringFromAscii(CStrVector("fooofo")); + Handle<String> input = factory->NewStringFromStaticAscii("fooofo"); Handle<SeqOneByteString> seq_input = Handle<SeqOneByteString>::cast(input); Address start_adr = seq_input->GetCharsAddress(); @@ -1021,13 +1022,13 @@ TEST(MacroAssemblerNativeBackReferenceUC16) { m.Bind(&missing_match); m.Fail(); - Handle<String> source = factory->NewStringFromAscii(CStrVector("^(..)..\1")); + Handle<String> source = factory->NewStringFromStaticAscii("^(..)..\1"); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); const uc16 input_data[6] = {'f', 0x2028, 'o', 'o', 'f', 0x2028}; - Handle<String> input = - factory->NewStringFromTwoByte(Vector<const uc16>(input_data, 6)); + Handle<String> input = factory->NewStringFromTwoByte( + Vector<const uc16>(input_data, 6)).ToHandleChecked(); Handle<SeqTwoByteString> seq_input = Handle<SeqTwoByteString>::cast(input); Address start_adr = seq_input->GetCharsAddress(); @@ -1079,11 +1080,11 @@ TEST(MacroAssemblernativeAtStart) { m.CheckNotCharacter('b', &fail); m.Succeed(); - Handle<String> source = factory->NewStringFromAscii(CStrVector("(^f|ob)")); + Handle<String> source = factory->NewStringFromStaticAscii("(^f|ob)"); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); - Handle<String> input = factory->NewStringFromAscii(CStrVector("foobar")); + Handle<String> input = factory->NewStringFromStaticAscii("foobar"); Handle<SeqOneByteString> seq_input = Handle<SeqOneByteString>::cast(input); Address start_adr = seq_input->GetCharsAddress(); @@ -1140,12 +1141,12 @@ TEST(MacroAssemblerNativeBackRefNoCase) { m.Succeed(); Handle<String> source = - factory->NewStringFromAscii(CStrVector("^(abc)\1\1(?!\1)...(?!\1)")); + factory->NewStringFromStaticAscii("^(abc)\1\1(?!\1)...(?!\1)"); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); Handle<String> input = - 
factory->NewStringFromAscii(CStrVector("aBcAbCABCxYzab")); + factory->NewStringFromStaticAscii("aBcAbCABCxYzab"); Handle<SeqOneByteString> seq_input = Handle<SeqOneByteString>::cast(input); Address start_adr = seq_input->GetCharsAddress(); @@ -1241,13 +1242,13 @@ TEST(MacroAssemblerNativeRegisters) { m.Fail(); Handle<String> source = - factory->NewStringFromAscii(CStrVector("<loop test>")); + factory->NewStringFromStaticAscii("<loop test>"); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); // String long enough for test (content doesn't matter). Handle<String> input = - factory->NewStringFromAscii(CStrVector("foofoofoofoofoo")); + factory->NewStringFromStaticAscii("foofoofoofoofoo"); Handle<SeqOneByteString> seq_input = Handle<SeqOneByteString>::cast(input); Address start_adr = seq_input->GetCharsAddress(); @@ -1285,13 +1286,13 @@ TEST(MacroAssemblerStackOverflow) { m.GoTo(&loop); Handle<String> source = - factory->NewStringFromAscii(CStrVector("<stack overflow test>")); + factory->NewStringFromStaticAscii("<stack overflow test>"); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); // String long enough for test (content doesn't matter). Handle<String> input = - factory->NewStringFromAscii(CStrVector("dummy")); + factory->NewStringFromStaticAscii("dummy"); Handle<SeqOneByteString> seq_input = Handle<SeqOneByteString>::cast(input); Address start_adr = seq_input->GetCharsAddress(); @@ -1332,13 +1333,13 @@ TEST(MacroAssemblerNativeLotsOfRegisters) { m.Succeed(); Handle<String> source = - factory->NewStringFromAscii(CStrVector("<huge register space test>")); + factory->NewStringFromStaticAscii("<huge register space test>"); Handle<Object> code_object = m.GetCode(source); Handle<Code> code = Handle<Code>::cast(code_object); // String long enough for test (content doesn't matter). 
Handle<String> input = - factory->NewStringFromAscii(CStrVector("sample text")); + factory->NewStringFromStaticAscii("sample text"); Handle<SeqOneByteString> seq_input = Handle<SeqOneByteString>::cast(input); Address start_adr = seq_input->GetCharsAddress(); @@ -1400,13 +1401,13 @@ TEST(MacroAssembler) { Factory* factory = isolate->factory(); HandleScope scope(isolate); - Handle<String> source = factory->NewStringFromAscii(CStrVector("^f(o)o")); + Handle<String> source = factory->NewStringFromStaticAscii("^f(o)o"); Handle<ByteArray> array = Handle<ByteArray>::cast(m.GetCode(source)); int captures[5]; const uc16 str1[] = {'f', 'o', 'o', 'b', 'a', 'r'}; - Handle<String> f1_16 = - factory->NewStringFromTwoByte(Vector<const uc16>(str1, 6)); + Handle<String> f1_16 = factory->NewStringFromTwoByte( + Vector<const uc16>(str1, 6)).ToHandleChecked(); CHECK(IrregexpInterpreter::Match(isolate, array, f1_16, captures, 0)); CHECK_EQ(0, captures[0]); @@ -1416,8 +1417,8 @@ TEST(MacroAssembler) { CHECK_EQ(84, captures[4]); const uc16 str2[] = {'b', 'a', 'r', 'f', 'o', 'o'}; - Handle<String> f2_16 = - factory->NewStringFromTwoByte(Vector<const uc16>(str2, 6)); + Handle<String> f2_16 = factory->NewStringFromTwoByte( + Vector<const uc16>(str2, 6)).ToHandleChecked(); CHECK(!IrregexpInterpreter::Match(isolate, array, f2_16, captures, 0)); CHECK_EQ(42, captures[0]); diff --git a/deps/v8/test/cctest/test-serialize.cc b/deps/v8/test/cctest/test-serialize.cc index c01a6889c..10c35c1c4 100644 --- a/deps/v8/test/cctest/test-serialize.cc +++ b/deps/v8/test/cctest/test-serialize.cc @@ -121,10 +121,8 @@ TEST(ExternalReferenceEncoder) { ExternalReference::address_of_real_stack_limit(isolate); CHECK_EQ(make_code(UNCLASSIFIED, 5), encoder.Encode(real_stack_limit_address.address())); -#ifdef ENABLE_DEBUGGER_SUPPORT CHECK_EQ(make_code(UNCLASSIFIED, 16), encoder.Encode(ExternalReference::debug_break(isolate).address())); -#endif // ENABLE_DEBUGGER_SUPPORT CHECK_EQ(make_code(UNCLASSIFIED, 10), encoder.Encode( ExternalReference::new_space_start(isolate).address())); @@ -157,10 +155,8 @@ TEST(ExternalReferenceDecoder) { decoder.Decode(make_code(UNCLASSIFIED, 4))); CHECK_EQ(ExternalReference::address_of_real_stack_limit(isolate).address(), decoder.Decode(make_code(UNCLASSIFIED, 5))); -#ifdef ENABLE_DEBUGGER_SUPPORT CHECK_EQ(ExternalReference::debug_break(isolate).address(), decoder.Decode(make_code(UNCLASSIFIED, 16))); -#endif // ENABLE_DEBUGGER_SUPPORT CHECK_EQ(ExternalReference::new_space_start(isolate).address(), decoder.Decode(make_code(UNCLASSIFIED, 10))); } @@ -266,7 +262,7 @@ static void Serialize() { // Test that the whole heap can be serialized. TEST(Serialize) { if (!Snapshot::HaveASnapshotToStartFrom()) { - Serializer::Enable(CcTest::i_isolate()); + Serializer::RequestEnable(CcTest::i_isolate()); v8::V8::Initialize(); Serialize(); } @@ -276,7 +272,7 @@ TEST(Serialize) { // Test that heap serialization is non-destructive. 
TEST(SerializeTwice) { if (!Snapshot::HaveASnapshotToStartFrom()) { - Serializer::Enable(CcTest::i_isolate()); + Serializer::RequestEnable(CcTest::i_isolate()); v8::V8::Initialize(); Serialize(); Serialize(); @@ -301,8 +297,7 @@ static void SanityCheck() { CHECK(isolate->global_object()->IsJSObject()); CHECK(isolate->native_context()->IsContext()); CHECK(CcTest::heap()->string_table()->IsStringTable()); - CHECK(!isolate->factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("Empty"))->IsFailure()); + isolate->factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("Empty")); } @@ -375,7 +370,7 @@ DEPENDENT_TEST(DeserializeFromSecondSerializationAndRunScript2, TEST(PartialSerialization) { if (!Snapshot::HaveASnapshotToStartFrom()) { Isolate* isolate = CcTest::i_isolate(); - Serializer::Enable(isolate); + Serializer::RequestEnable(isolate); v8::V8::Initialize(); v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate); Heap* heap = isolate->heap(); @@ -526,7 +521,7 @@ DEPENDENT_TEST(PartialDeserialization, PartialSerialization) { TEST(ContextSerialization) { if (!Snapshot::HaveASnapshotToStartFrom()) { Isolate* isolate = CcTest::i_isolate(); - Serializer::Enable(isolate); + Serializer::RequestEnable(isolate); v8::V8::Initialize(); v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate); Heap* heap = isolate->heap(); diff --git a/deps/v8/test/cctest/test-spaces.cc b/deps/v8/test/cctest/test-spaces.cc index 223912e37..47e2536fc 100644 --- a/deps/v8/test/cctest/test-spaces.cc +++ b/deps/v8/test/cctest/test-spaces.cc @@ -328,9 +328,8 @@ TEST(NewSpace) { CHECK(new_space.HasBeenSetUp()); while (new_space.Available() >= Page::kMaxRegularHeapObjectSize) { - Object* obj = - new_space.AllocateRaw(Page::kMaxRegularHeapObjectSize)-> - ToObjectUnchecked(); + Object* obj = new_space.AllocateRaw( + Page::kMaxRegularHeapObjectSize).ToObjectChecked(); CHECK(new_space.Contains(HeapObject::cast(obj))); } @@ -359,7 +358,7 @@ TEST(OldSpace) { CHECK(s->SetUp()); while (s->Available() > 0) { - s->AllocateRaw(Page::kMaxRegularHeapObjectSize)->ToObjectUnchecked(); + s->AllocateRaw(Page::kMaxRegularHeapObjectSize).ToObjectChecked(); } s->TearDown(); @@ -377,7 +376,7 @@ TEST(LargeObjectSpace) { int lo_size = Page::kPageSize; - Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->ToObjectUnchecked(); + Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE).ToObjectChecked(); CHECK(obj->IsHeapObject()); HeapObject* ho = HeapObject::cast(obj); @@ -390,15 +389,15 @@ TEST(LargeObjectSpace) { while (true) { intptr_t available = lo->Available(); - { MaybeObject* maybe_obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE); - if (!maybe_obj->ToObject(&obj)) break; + { AllocationResult allocation = lo->AllocateRaw(lo_size, NOT_EXECUTABLE); + if (allocation.IsRetry()) break; } CHECK(lo->Available() < available); }; CHECK(!lo->IsEmpty()); - CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->IsFailure()); + CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE).IsRetry()); } diff --git a/deps/v8/test/cctest/test-strings.cc b/deps/v8/test/cctest/test-strings.cc index 6ff52003b..706836c1c 100644 --- a/deps/v8/test/cctest/test-strings.cc +++ b/deps/v8/test/cctest/test-strings.cc @@ -164,8 +164,8 @@ static void InitializeBuildingBlocks(Handle<String>* building_blocks, for (int j = 0; j < len; j++) { buf[j] = rng->next(0x10000); } - building_blocks[i] = - factory->NewStringFromTwoByte(Vector<const uc16>(buf, len)); + building_blocks[i] = factory->NewStringFromTwoByte( + Vector<const uc16>(buf, len)).ToHandleChecked(); for (int 
j = 0; j < len; j++) { CHECK_EQ(buf[j], building_blocks[i]->Get(j)); } @@ -176,8 +176,8 @@ static void InitializeBuildingBlocks(Handle<String>* building_blocks, for (int j = 0; j < len; j++) { buf[j] = rng->next(0x80); } - building_blocks[i] = - factory->NewStringFromAscii(Vector<const char>(buf, len)); + building_blocks[i] = factory->NewStringFromAscii( + Vector<const char>(buf, len)).ToHandleChecked(); for (int j = 0; j < len; j++) { CHECK_EQ(buf[j], building_blocks[i]->Get(j)); } @@ -230,11 +230,11 @@ class ConsStringStats { } void Reset(); void VerifyEqual(const ConsStringStats& that) const; - unsigned leaves_; - unsigned empty_leaves_; - unsigned chars_; - unsigned left_traversals_; - unsigned right_traversals_; + int leaves_; + int empty_leaves_; + int chars_; + int left_traversals_; + int right_traversals_; private: DISALLOW_COPY_AND_ASSIGN(ConsStringStats); }; @@ -250,11 +250,11 @@ void ConsStringStats::Reset() { void ConsStringStats::VerifyEqual(const ConsStringStats& that) const { - CHECK(this->leaves_ == that.leaves_); - CHECK(this->empty_leaves_ == that.empty_leaves_); - CHECK(this->chars_ == that.chars_); - CHECK(this->left_traversals_ == that.left_traversals_); - CHECK(this->right_traversals_ == that.right_traversals_); + CHECK_EQ(this->leaves_, that.leaves_); + CHECK_EQ(this->empty_leaves_, that.empty_leaves_); + CHECK_EQ(this->chars_, that.chars_); + CHECK_EQ(this->left_traversals_, that.left_traversals_); + CHECK_EQ(this->right_traversals_, that.right_traversals_); } @@ -270,14 +270,14 @@ class ConsStringGenerationData { double leftness_; double rightness_; double empty_leaf_threshold_; - unsigned max_leaves_; + int max_leaves_; // Cached data. Handle<String> building_blocks_[kNumberOfBuildingBlocks]; String* empty_string_; MyRandomNumberGenerator rng_; // Stats. ConsStringStats stats_; - unsigned early_terminations_; + int early_terminations_; private: DISALLOW_COPY_AND_ASSIGN(ConsStringGenerationData); }; @@ -356,23 +356,14 @@ void AccumulateStats(Handle<String> cons_string, ConsStringStats* stats) { void AccumulateStatsWithOperator( ConsString* cons_string, ConsStringStats* stats) { - unsigned offset = 0; - int32_t type = cons_string->map()->instance_type(); - unsigned length = static_cast<unsigned>(cons_string->length()); - ConsStringIteratorOp op; - String* string = op.Operate(cons_string, &offset, &type, &length); - CHECK(string != NULL); - while (true) { - ASSERT(!string->IsConsString()); + ConsStringIteratorOp op(cons_string); + String* string; + int offset; + while (NULL != (string = op.Next(&offset))) { // Accumulate stats. + CHECK_EQ(0, offset); stats->leaves_++; stats->chars_ += string->length(); - // Check for completion. - bool keep_going_fast_check = op.HasMore(); - string = op.ContinueOperation(&type, &length); - if (string == NULL) return; - // Verify no false positives for fast check. - CHECK(keep_going_fast_check); } } @@ -380,7 +371,7 @@ void AccumulateStatsWithOperator( void VerifyConsString(Handle<String> root, ConsStringGenerationData* data) { // Verify basic data. CHECK(root->IsConsString()); - CHECK(static_cast<unsigned>(root->length()) == data->stats_.chars_); + CHECK_EQ(root->length(), data->stats_.chars_); // Recursive verify. ConsStringStats stats; AccumulateStats(ConsString::cast(*root), &stats); @@ -447,12 +438,12 @@ static Handle<String> ConstructRandomString(ConsStringGenerationData* data, left = ConstructRandomString(data, max_recursion - 1); } // Build the cons string. 
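The AccumulateStatsWithOperator hunk above replaces the old Operate/ContinueOperation protocol with a simpler loop: construct the iterator from the cons string, then call Next(&offset) until it returns NULL. A self-contained toy of the same traversal pattern over a small cons-like tree (Node and LeafIterator are made-up stand-ins for ConsString and ConsStringIteratorOp):

// Toy leaf traversal mirroring the "while (NULL != (string = op.Next(&offset)))"
// shape adopted in the hunk above.
#include <cstdio>
#include <string>
#include <vector>

struct Node {                  // leaf if text is non-empty, else a "cons" pair
  std::string text;
  const Node* first = nullptr;
  const Node* second = nullptr;
};

class LeafIterator {
 public:
  explicit LeafIterator(const Node* root) { if (root) stack_.push_back(root); }
  // Returns the next leaf, or nullptr when the traversal is exhausted.
  const std::string* Next(int* offset) {
    *offset = 0;                            // leaves always start at offset 0 here
    while (!stack_.empty()) {
      const Node* n = stack_.back();
      stack_.pop_back();
      if (n->first || n->second) {          // interior node: descend left first
        if (n->second) stack_.push_back(n->second);
        if (n->first) stack_.push_back(n->first);
      } else {
        return &n->text;                    // leaf
      }
    }
    return nullptr;
  }
 private:
  std::vector<const Node*> stack_;
};

int main() {
  Node a{"foo"}, b{"bar"}, c{"baz"};
  Node left{"", &a, &b}, root{"", &left, &c};
  int leaves = 0, chars = 0, offset = 0;
  LeafIterator it(&root);
  for (const std::string* s; (s = it.Next(&offset)) != nullptr; ) {
    ++leaves;
    chars += static_cast<int>(s->size());
  }
  std::printf("%d leaves, %d chars\n", leaves, chars);  // 3 leaves, 9 chars
}
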
- Handle<String> root = factory->NewConsString(left, right); + Handle<String> root = factory->NewConsString(left, right).ToHandleChecked(); CHECK(root->IsConsString() && !root->IsFlat()); // Special work needed for flat string. if (flat) { data->stats_.empty_leaves_++; - FlattenString(root); + String::Flatten(root); CHECK(root->IsConsString() && root->IsFlat()); } return root; @@ -463,11 +454,12 @@ static Handle<String> ConstructLeft( ConsStringGenerationData* data, int depth) { Factory* factory = CcTest::i_isolate()->factory(); - Handle<String> answer = factory->NewStringFromAscii(CStrVector("")); + Handle<String> answer = factory->NewStringFromStaticAscii(""); data->stats_.leaves_++; for (int i = 0; i < depth; i++) { Handle<String> block = data->block(i); - Handle<String> next = factory->NewConsString(answer, block); + Handle<String> next = + factory->NewConsString(answer, block).ToHandleChecked(); if (next->IsConsString()) data->stats_.leaves_++; data->stats_.chars_ += block->length(); answer = next; @@ -481,11 +473,12 @@ static Handle<String> ConstructRight( ConsStringGenerationData* data, int depth) { Factory* factory = CcTest::i_isolate()->factory(); - Handle<String> answer = factory->NewStringFromAscii(CStrVector("")); + Handle<String> answer = factory->NewStringFromStaticAscii(""); data->stats_.leaves_++; for (int i = depth - 1; i >= 0; i--) { Handle<String> block = data->block(i); - Handle<String> next = factory->NewConsString(block, answer); + Handle<String> next = + factory->NewConsString(block, answer).ToHandleChecked(); if (next->IsConsString()) data->stats_.leaves_++; data->stats_.chars_ += block->length(); answer = next; @@ -508,7 +501,8 @@ static Handle<String> ConstructBalancedHelper( if (to - from == 2) { data->stats_.chars_ += data->block(from)->length(); data->stats_.chars_ += data->block(from+1)->length(); - return factory->NewConsString(data->block(from), data->block(from+1)); + return factory->NewConsString(data->block(from), data->block(from+1)) + .ToHandleChecked(); } Handle<String> part1 = ConstructBalancedHelper(data, from, from + ((to - from) / 2)); @@ -516,7 +510,7 @@ static Handle<String> ConstructBalancedHelper( ConstructBalancedHelper(data, from + ((to - from) / 2), to); if (part1->IsConsString()) data->stats_.left_traversals_++; if (part2->IsConsString()) data->stats_.right_traversals_++; - return factory->NewConsString(part1, part2); + return factory->NewConsString(part1, part2).ToHandleChecked(); } @@ -570,7 +564,7 @@ TEST(Traverse) { v8::HandleScope scope(CcTest::isolate()); ConsStringGenerationData data(false); Handle<String> flat = ConstructBalanced(&data); - FlattenString(flat); + String::Flatten(flat); Handle<String> left_asymmetric = ConstructLeft(&data, DEEP_DEPTH); Handle<String> right_asymmetric = ConstructRight(&data, DEEP_DEPTH); Handle<String> symmetric = ConstructBalanced(&data); @@ -590,19 +584,19 @@ TEST(Traverse) { printf("6\n"); TraverseFirst(left_asymmetric, right_deep_asymmetric, 65536); printf("7\n"); - FlattenString(left_asymmetric); + String::Flatten(left_asymmetric); printf("10\n"); Traverse(flat, left_asymmetric); printf("11\n"); - FlattenString(right_asymmetric); + String::Flatten(right_asymmetric); printf("12\n"); Traverse(flat, right_asymmetric); printf("14\n"); - FlattenString(symmetric); + String::Flatten(symmetric); printf("15\n"); Traverse(flat, symmetric); printf("16\n"); - FlattenString(left_deep_asymmetric); + String::Flatten(left_deep_asymmetric); printf("18\n"); } @@ -622,9 +616,9 @@ static void VerifyCharacterStream( 
// Want to test the offset == length case. if (offset > length) offset = length; StringCharacterStream flat_stream( - flat_string, &cons_string_iterator_op_1, static_cast<unsigned>(offset)); + flat_string, &cons_string_iterator_op_1, offset); StringCharacterStream cons_stream( - cons_string, &cons_string_iterator_op_2, static_cast<unsigned>(offset)); + cons_string, &cons_string_iterator_op_2, offset); for (int i = offset; i < length; i++) { uint16_t c = flat_string->Get(i); CHECK(flat_stream.HasMore()); @@ -667,7 +661,7 @@ void TestStringCharacterStream(BuildString build, int test_cases) { ConsStringStats flat_string_stats; AccumulateStats(flat_string, &flat_string_stats); // Flatten string. - FlattenString(flat_string); + String::Flatten(flat_string); // Build unflattened version of cons string to test. Handle<String> cons_string = build(i, &data); ConsStringStats cons_string_stats; @@ -710,7 +704,8 @@ static Handle<String> BuildEdgeCaseConsString( data->stats_.chars_ += data->block(0)->length(); data->stats_.chars_ += data->block(1)->length(); data->stats_.leaves_ += 2; - return factory->NewConsString(data->block(0), data->block(1)); + return factory->NewConsString(data->block(0), data->block(1)) + .ToHandleChecked(); case 6: // Simple flattened tree. data->stats_.chars_ += data->block(0)->length(); @@ -719,8 +714,9 @@ static Handle<String> BuildEdgeCaseConsString( data->stats_.empty_leaves_ += 1; { Handle<String> string = - factory->NewConsString(data->block(0), data->block(1)); - FlattenString(string); + factory->NewConsString(data->block(0), data->block(1)) + .ToHandleChecked(); + String::Flatten(string); return string; } case 7: @@ -733,9 +729,10 @@ static Handle<String> BuildEdgeCaseConsString( data->stats_.left_traversals_ += 1; { Handle<String> left = - factory->NewConsString(data->block(0), data->block(1)); - FlattenString(left); - return factory->NewConsString(left, data->block(2)); + factory->NewConsString(data->block(0), data->block(1)) + .ToHandleChecked(); + String::Flatten(left); + return factory->NewConsString(left, data->block(2)).ToHandleChecked(); } case 8: // Left node and right node flattened. 
@@ -749,12 +746,14 @@ static Handle<String> BuildEdgeCaseConsString( data->stats_.right_traversals_ += 1; { Handle<String> left = - factory->NewConsString(data->block(0), data->block(1)); - FlattenString(left); + factory->NewConsString(data->block(0), data->block(1)) + .ToHandleChecked(); + String::Flatten(left); Handle<String> right = - factory->NewConsString(data->block(2), data->block(2)); - FlattenString(right); - return factory->NewConsString(left, right); + factory->NewConsString(data->block(2), data->block(2)) + .ToHandleChecked(); + String::Flatten(right); + return factory->NewConsString(left, right).ToHandleChecked(); } } UNREACHABLE(); @@ -862,14 +861,15 @@ TEST(DeepAscii) { for (int i = 0; i < DEEP_ASCII_DEPTH; i++) { foo[i] = "foo "[i % 4]; } - Handle<String> string = - factory->NewStringFromAscii(Vector<const char>(foo, DEEP_ASCII_DEPTH)); - Handle<String> foo_string = factory->NewStringFromAscii(CStrVector("foo")); + Handle<String> string = factory->NewStringFromOneByte( + OneByteVector(foo, DEEP_ASCII_DEPTH)).ToHandleChecked(); + Handle<String> foo_string = factory->NewStringFromStaticAscii("foo"); for (int i = 0; i < DEEP_ASCII_DEPTH; i += 10) { - string = factory->NewConsString(string, foo_string); + string = factory->NewConsString(string, foo_string).ToHandleChecked(); } - Handle<String> flat_string = factory->NewConsString(string, foo_string); - FlattenString(flat_string); + Handle<String> flat_string = + factory->NewConsString(string, foo_string).ToHandleChecked(); + String::Flatten(flat_string); for (int i = 0; i < 500; i++) { TraverseFirst(flat_string, string, DEEP_ASCII_DEPTH); @@ -1078,7 +1078,7 @@ TEST(CachedHashOverflow) { CHECK_EQ(results[i]->IsUndefined(), result->IsUndefined()); CHECK_EQ(results[i]->IsNumber(), result->IsNumber()); if (result->IsNumber()) { - CHECK_EQ(Smi::cast(results[i]->ToSmi()->ToObjectChecked())->value(), + CHECK_EQ(Object::ToSmi(isolate, results[i]).ToHandleChecked()->value(), result->ToInt32()->Value()); } } @@ -1091,8 +1091,9 @@ TEST(SliceFromCons) { Factory* factory = CcTest::i_isolate()->factory(); v8::HandleScope scope(CcTest::isolate()); Handle<String> string = - factory->NewStringFromAscii(CStrVector("parentparentparent")); - Handle<String> parent = factory->NewConsString(string, string); + factory->NewStringFromStaticAscii("parentparentparent"); + Handle<String> parent = + factory->NewConsString(string, string).ToHandleChecked(); CHECK(parent->IsConsString()); CHECK(!parent->IsFlat()); Handle<String> slice = factory->NewSubString(parent, 1, 25); @@ -1127,7 +1128,8 @@ TEST(SliceFromExternal) { v8::HandleScope scope(CcTest::isolate()); AsciiVectorResource resource( i::Vector<const char>("abcdefghijklmnopqrstuvwxyz", 26)); - Handle<String> string = factory->NewExternalStringFromAscii(&resource); + Handle<String> string = + factory->NewExternalStringFromAscii(&resource).ToHandleChecked(); CHECK(string->IsExternalString()); Handle<String> slice = factory->NewSubString(string, 1, 25); CHECK(slice->IsSlicedString()); @@ -1201,7 +1203,7 @@ TEST(AsciiArrayJoin) { // Set heap limits. 
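SliceFromCons and SliceFromExternal above check that NewSubString over a flat parent yields a SlicedString, i.e. an (offset, length) view that shares the parent's characters. A conceptual standalone sketch of that sharing, using std::string_view in place of a real SlicedString:

// Conceptual sketch only; v8's SlicedString keeps a parent handle rather
// than a raw pointer view.
#include <cstdio>
#include <string>
#include <string_view>

int main() {
  // "Flatten" two cons pieces into one contiguous parent buffer first.
  std::string parent =
      std::string("parentparentparent") + "parentparentparent";
  // The slice is just (offset, length) over the parent's storage; nothing is
  // copied, mirroring slice = NewSubString(parent, 1, 25) in the test above.
  std::string_view slice(parent.data() + 1, 24);
  std::printf("%.*s\n", static_cast<int>(slice.size()), slice.data());
  // The view is valid only while the parent's storage is, which is why a
  // cons parent is flattened before it is sliced.
}
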
static const int K = 1024; v8::ResourceConstraints constraints; - constraints.set_max_young_space_size(256 * K); + constraints.set_max_new_space_size(2 * K * K); constraints.set_max_old_space_size(4 * K * K); v8::SetResourceConstraints(CcTest::isolate(), &constraints); @@ -1407,7 +1409,5 @@ TEST(InvalidExternalString) { INVALID_STRING_TEST(NewStringFromAscii, char) INVALID_STRING_TEST(NewStringFromUtf8, char) INVALID_STRING_TEST(NewStringFromOneByte, uint8_t) -INVALID_STRING_TEST(InternalizeOneByteString, uint8_t) -INVALID_STRING_TEST(InternalizeUtf8String, char) #undef INVALID_STRING_TEST diff --git a/deps/v8/test/cctest/test-strtod.cc b/deps/v8/test/cctest/test-strtod.cc index 237d35db1..bebf4d14b 100644 --- a/deps/v8/test/cctest/test-strtod.cc +++ b/deps/v8/test/cctest/test-strtod.cc @@ -34,6 +34,7 @@ #include "diy-fp.h" #include "double.h" #include "strtod.h" +#include "utils/random-number-generator.h" using namespace v8::internal; @@ -448,13 +449,13 @@ static const int kShortStrtodRandomCount = 2; static const int kLargeStrtodRandomCount = 2; TEST(RandomStrtod) { - srand(static_cast<unsigned int>(time(NULL))); + RandomNumberGenerator rng; char buffer[kBufferSize]; for (int length = 1; length < 15; length++) { for (int i = 0; i < kShortStrtodRandomCount; ++i) { int pos = 0; for (int j = 0; j < length; ++j) { - buffer[pos++] = rand() % 10 + '0'; + buffer[pos++] = rng.NextInt(10) + '0'; } int exponent = DeterministicRandom() % (25*2 + 1) - 25 - length; buffer[pos] = '\0'; @@ -467,7 +468,7 @@ TEST(RandomStrtod) { for (int i = 0; i < kLargeStrtodRandomCount; ++i) { int pos = 0; for (int j = 0; j < length; ++j) { - buffer[pos++] = rand() % 10 + '0'; + buffer[pos++] = rng.NextInt(10) + '0'; } int exponent = DeterministicRandom() % (308*2 + 1) - 308 - length; buffer[pos] = '\0'; diff --git a/deps/v8/test/cctest/test-symbols.cc b/deps/v8/test/cctest/test-symbols.cc index 6fceea613..f0d0ed160 100644 --- a/deps/v8/test/cctest/test-symbols.cc +++ b/deps/v8/test/cctest/test-symbols.cc @@ -33,7 +33,7 @@ TEST(Create) { symbols[i]->Print(); #endif #if VERIFY_HEAP - symbols[i]->Verify(); + symbols[i]->ObjectVerify(); #endif } diff --git a/deps/v8/test/cctest/test-types.cc b/deps/v8/test/cctest/test-types.cc index 326bd1b56..47868f648 100644 --- a/deps/v8/test/cctest/test-types.cc +++ b/deps/v8/test/cctest/test-types.cc @@ -25,96 +25,213 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#include <vector> + #include "cctest.h" #include "types.h" +#include "utils/random-number-generator.h" using namespace v8::internal; +// Testing auxiliaries (breaking the Type abstraction). 
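The RandomStrtod hunk above drops srand()/rand() in favour of v8's internal RandomNumberGenerator and its NextInt(10) call. The same digit-string generation can be sketched standalone with a seeded standard-library PRNG as a stand-in:

// Stand-in PRNG sketch; v8's RandomNumberGenerator is not used here.
#include <cstdio>
#include <random>
#include <string>

int main() {
  std::mt19937 rng(42);                          // fixed seed for repeatability
  std::uniform_int_distribution<int> digit(0, 9);  // NextInt(10) equivalent
  for (int length = 1; length <= 15; ++length) {
    std::string buffer;
    for (int j = 0; j < length; ++j) {
      buffer.push_back(static_cast<char>(digit(rng) + '0'));
    }
    std::printf("%s\n", buffer.c_str());         // random decimal digit string
  }
}
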
+struct ZoneRep { + typedef void* Struct; + + static bool IsStruct(Type* t, int tag) { + return !IsBitset(t) && reinterpret_cast<intptr_t>(AsStruct(t)[0]) == tag; + } + static bool IsBitset(Type* t) { return reinterpret_cast<intptr_t>(t) & 1; } + static bool IsClass(Type* t) { return IsStruct(t, 0); } + static bool IsConstant(Type* t) { return IsStruct(t, 1); } + static bool IsArray(Type* t) { return IsStruct(t, 2); } + static bool IsFunction(Type* t) { return IsStruct(t, 3); } + static bool IsUnion(Type* t) { return IsStruct(t, 4); } + + static Struct* AsStruct(Type* t) { + return reinterpret_cast<Struct*>(t); + } + static int AsBitset(Type* t) { + return static_cast<int>(reinterpret_cast<intptr_t>(t) >> 1); + } + static Map* AsClass(Type* t) { + return *static_cast<Map**>(AsStruct(t)[3]); + } + static Object* AsConstant(Type* t) { + return *static_cast<Object**>(AsStruct(t)[3]); + } + static Struct* AsUnion(Type* t) { + return AsStruct(t); + } + static int Length(Struct* structured) { + return static_cast<int>(reinterpret_cast<intptr_t>(structured[1])); + } + + static Zone* ToRegion(Zone* zone, Isolate* isolate) { return zone; } +}; + + +struct HeapRep { + typedef FixedArray Struct; + + static bool IsStruct(Handle<HeapType> t, int tag) { + return t->IsFixedArray() && Smi::cast(AsStruct(t)->get(0))->value() == tag; + } + static bool IsBitset(Handle<HeapType> t) { return t->IsSmi(); } + static bool IsClass(Handle<HeapType> t) { return t->IsMap(); } + static bool IsConstant(Handle<HeapType> t) { return t->IsBox(); } + static bool IsArray(Handle<HeapType> t) { return IsStruct(t, 2); } + static bool IsFunction(Handle<HeapType> t) { return IsStruct(t, 3); } + static bool IsUnion(Handle<HeapType> t) { return IsStruct(t, 4); } + + static Struct* AsStruct(Handle<HeapType> t) { return FixedArray::cast(*t); } + static int AsBitset(Handle<HeapType> t) { return Smi::cast(*t)->value(); } + static Map* AsClass(Handle<HeapType> t) { return Map::cast(*t); } + static Object* AsConstant(Handle<HeapType> t) { + return Box::cast(*t)->value(); + } + static Struct* AsUnion(Handle<HeapType> t) { return AsStruct(t); } + static int Length(Struct* structured) { return structured->length() - 1; } + + static Isolate* ToRegion(Zone* zone, Isolate* isolate) { return isolate; } +}; + + template<class Type, class TypeHandle, class Region> class Types { public: - Types(Region* region, Isolate* isolate) : - Representation(Type::Representation(region)), - Semantic(Type::Semantic(region)), - None(Type::None(region)), - Any(Type::Any(region)), - Oddball(Type::Oddball(region)), - Boolean(Type::Boolean(region)), - Null(Type::Null(region)), - Undefined(Type::Undefined(region)), - Number(Type::Number(region)), - SignedSmall(Type::SignedSmall(region)), - Signed32(Type::Signed32(region)), - Float(Type::Float(region)), - Name(Type::Name(region)), - UniqueName(Type::UniqueName(region)), - String(Type::String(region)), - InternalizedString(Type::InternalizedString(region)), - Symbol(Type::Symbol(region)), - Receiver(Type::Receiver(region)), - Object(Type::Object(region)), - Array(Type::Array(region)), - Function(Type::Function(region)), - Proxy(Type::Proxy(region)), - object_map(isolate->factory()->NewMap(JS_OBJECT_TYPE, 3 * kPointerSize)), - array_map(isolate->factory()->NewMap(JS_ARRAY_TYPE, 4 * kPointerSize)), - region_(region) { + Types(Region* region, Isolate* isolate) : region_(region) { + #define DECLARE_TYPE(name, value) \ + name = Type::name(region); \ + types.push_back(name); + BITSET_TYPE_LIST(DECLARE_TYPE) + #undef 
DECLARE_TYPE + + object_map = isolate->factory()->NewMap(JS_OBJECT_TYPE, 3 * kPointerSize); + array_map = isolate->factory()->NewMap(JS_ARRAY_TYPE, 4 * kPointerSize); + uninitialized_map = isolate->factory()->uninitialized_map(); + ObjectClass = Type::Class(object_map, region); + ArrayClass = Type::Class(array_map, region); + UninitializedClass = Type::Class(uninitialized_map, region); + + maps.push_back(object_map); + maps.push_back(array_map); + maps.push_back(uninitialized_map); + for (MapVector::iterator it = maps.begin(); it != maps.end(); ++it) { + types.push_back(Type::Class(*it, region)); + } + smi = handle(Smi::FromInt(666), isolate); signed32 = isolate->factory()->NewHeapNumber(0x40000000); object1 = isolate->factory()->NewJSObjectFromMap(object_map); object2 = isolate->factory()->NewJSObjectFromMap(object_map); array = isolate->factory()->NewJSArray(20); - ObjectClass = Type::Class(object_map, region); - ArrayClass = Type::Class(array_map, region); + uninitialized = isolate->factory()->uninitialized_value(); SmiConstant = Type::Constant(smi, region); Signed32Constant = Type::Constant(signed32, region); ObjectConstant1 = Type::Constant(object1, region); ObjectConstant2 = Type::Constant(object2, region); - ArrayConstant1 = Type::Constant(array, region); - ArrayConstant2 = Type::Constant(array, region); - } - - TypeHandle Representation; - TypeHandle Semantic; - TypeHandle None; - TypeHandle Any; - TypeHandle Oddball; - TypeHandle Boolean; - TypeHandle Null; - TypeHandle Undefined; - TypeHandle Number; - TypeHandle SignedSmall; - TypeHandle Signed32; - TypeHandle Float; - TypeHandle Name; - TypeHandle UniqueName; - TypeHandle String; - TypeHandle InternalizedString; - TypeHandle Symbol; - TypeHandle Receiver; - TypeHandle Object; - TypeHandle Array; - TypeHandle Function; - TypeHandle Proxy; + ArrayConstant = Type::Constant(array, region); + UninitializedConstant = Type::Constant(uninitialized, region); + + values.push_back(smi); + values.push_back(signed32); + values.push_back(object1); + values.push_back(object2); + values.push_back(array); + values.push_back(uninitialized); + for (ValueVector::iterator it = values.begin(); it != values.end(); ++it) { + types.push_back(Type::Constant(*it, region)); + } - TypeHandle ObjectClass; - TypeHandle ArrayClass; + FloatArray = Type::Array(Float, region); + StringArray = Type::Array(String, region); + AnyArray = Type::Array(Any, region); - TypeHandle SmiConstant; - TypeHandle Signed32Constant; - TypeHandle ObjectConstant1; - TypeHandle ObjectConstant2; - TypeHandle ArrayConstant1; - TypeHandle ArrayConstant2; + SignedFunction1 = Type::Function(SignedSmall, SignedSmall, region); + NumberFunction1 = Type::Function(Number, Number, region); + NumberFunction2 = Type::Function(Number, Number, Number, region); + MethodFunction = Type::Function(String, Object, 0, region); + + for (int i = 0; i < 50; ++i) { + types.push_back(Fuzz()); + } + } Handle<i::Map> object_map; Handle<i::Map> array_map; + Handle<i::Map> uninitialized_map; Handle<i::Smi> smi; Handle<i::HeapNumber> signed32; Handle<i::JSObject> object1; Handle<i::JSObject> object2; Handle<i::JSArray> array; + Handle<i::Oddball> uninitialized; + + #define DECLARE_TYPE(name, value) TypeHandle name; + BITSET_TYPE_LIST(DECLARE_TYPE) + #undef DECLARE_TYPE + + TypeHandle ObjectClass; + TypeHandle ArrayClass; + TypeHandle UninitializedClass; + + TypeHandle SmiConstant; + TypeHandle Signed32Constant; + TypeHandle ObjectConstant1; + TypeHandle ObjectConstant2; + TypeHandle ArrayConstant; + TypeHandle 
UninitializedConstant; + + TypeHandle FloatArray; + TypeHandle StringArray; + TypeHandle AnyArray; + + TypeHandle SignedFunction1; + TypeHandle NumberFunction1; + TypeHandle NumberFunction2; + TypeHandle MethodFunction; + + typedef std::vector<TypeHandle> TypeVector; + typedef std::vector<Handle<i::Map> > MapVector; + typedef std::vector<Handle<i::Object> > ValueVector; + TypeVector types; + MapVector maps; + ValueVector values; + + TypeHandle Of(Handle<i::Object> value) { + return Type::Of(value, region_); + } + + TypeHandle NowOf(Handle<i::Object> value) { + return Type::NowOf(value, region_); + } + + TypeHandle Constant(Handle<i::Object> value) { + return Type::Constant(value, region_); + } + + TypeHandle Class(Handle<i::Map> map) { + return Type::Class(map, region_); + } + + TypeHandle Array1(TypeHandle element) { + return Type::Array(element, region_); + } + + TypeHandle Function0(TypeHandle result, TypeHandle receiver) { + return Type::Function(result, receiver, 0, region_); + } + + TypeHandle Function1(TypeHandle result, TypeHandle receiver, TypeHandle arg) { + TypeHandle type = Type::Function(result, receiver, 1, region_); + type->AsFunction()->InitParameter(0, arg); + return type; + } + + TypeHandle Function2(TypeHandle result, TypeHandle arg1, TypeHandle arg2) { + return Type::Function(result, arg1, arg2, region_); + } TypeHandle Union(TypeHandle t1, TypeHandle t2) { return Type::Union(t1, t2, region_); @@ -128,42 +245,55 @@ class Types { return Type::template Convert<Type2>(t, region_); } + TypeHandle Random() { + return types[rng_.NextInt(static_cast<int>(types.size()))]; + } + TypeHandle Fuzz(int depth = 5) { - switch (rand() % (depth == 0 ? 3 : 20)) { + switch (rng_.NextInt(depth == 0 ? 3 : 20)) { case 0: { // bitset int n = 0 #define COUNT_BITSET_TYPES(type, value) + 1 BITSET_TYPE_LIST(COUNT_BITSET_TYPES) #undef COUNT_BITSET_TYPES ; - int i = rand() % n; + int i = rng_.NextInt(n); #define PICK_BITSET_TYPE(type, value) \ if (i-- == 0) return Type::type(region_); BITSET_TYPE_LIST(PICK_BITSET_TYPE) #undef PICK_BITSET_TYPE UNREACHABLE(); } - case 1: // class - switch (rand() % 2) { - case 0: return ObjectClass; - case 1: return ArrayClass; - } - UNREACHABLE(); - case 2: // constant - switch (rand() % 6) { - case 0: return SmiConstant; - case 1: return Signed32Constant; - case 2: return ObjectConstant1; - case 3: return ObjectConstant2; - case 4: return ArrayConstant1; - case 5: return ArrayConstant2; + case 1: { // class + int i = rng_.NextInt(static_cast<int>(maps.size())); + return Type::Class(maps[i], region_); + } + case 2: { // constant + int i = rng_.NextInt(static_cast<int>(values.size())); + return Type::Constant(values[i], region_); + } + case 3: { // array + TypeHandle element = Fuzz(depth / 2); + return Type::Array(element, region_); + } + case 4: + case 5: + case 6: { // function + TypeHandle result = Fuzz(depth / 2); + TypeHandle receiver = Fuzz(depth / 2); + int arity = rng_.NextInt(3); + TypeHandle type = Type::Function(result, receiver, arity, region_); + for (int i = 0; i < type->AsFunction()->Arity(); ++i) { + TypeHandle parameter = Fuzz(depth - 1); + type->AsFunction()->InitParameter(i, parameter); } - UNREACHABLE(); + } default: { // union - int n = rand() % 10; + int n = rng_.NextInt(10); TypeHandle type = None; for (int i = 0; i < n; ++i) { - type = Type::Union(type, Fuzz(depth - 1), region_); + TypeHandle operand = Fuzz(depth - 1); + type = Type::Union(type, operand, region_); } return type; } @@ -173,65 +303,21 @@ class Types { private: Region* region_; 
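Types::Fuzz above generates random type terms under a depth budget: at depth 0 only bitset leaves are drawn, otherwise a constructor (class, constant, array, function, union) is chosen and its operands are fuzzed at depth - 1 or depth / 2. A self-contained miniature of that pattern, emitting type expressions as strings instead of real Type handles; NextInt here is a stand-in for RandomNumberGenerator::NextInt:

// Depth-bounded random term generation, toy version of Types::Fuzz above.
#include <cstdio>
#include <random>
#include <string>

std::mt19937 rng(2014);

int NextInt(int max) {   // stand-in for RandomNumberGenerator::NextInt(max)
  return std::uniform_int_distribution<int>(0, max - 1)(rng);
}

std::string Fuzz(int depth = 5) {
  static const char* kLeaves[] = {"None", "Number", "String", "Object"};
  switch (NextInt(depth == 0 ? 1 : 4)) {
    case 0:  // leaf ("bitset") case, the only one allowed at depth 0
      return kLeaves[NextInt(4)];
    case 1:  // array: one operand, fuzzed shallower
      return "Array<" + Fuzz(depth / 2) + ">";
    case 2: {  // function: random arity of parameters plus a result
      std::string s = "(";
      int arity = NextInt(3);
      for (int i = 0; i < arity; ++i) s += (i ? "," : "") + Fuzz(depth - 1);
      return s + ")->" + Fuzz(depth / 2);
    }
    default: {  // union of a few shallower operands
      std::string s = Fuzz(depth - 1);
      for (int i = 0, n = NextInt(3); i < n; ++i) s += "|" + Fuzz(depth - 1);
      return s;
    }
  }
}

int main() {
  for (int i = 0; i < 5; ++i) std::printf("%s\n", Fuzz().c_str());
}
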
-}; - - -// Testing auxiliaries (breaking the Type abstraction). -struct ZoneRep { - static bool IsTagged(Type* t, int tag) { - return !IsBitset(t) - && reinterpret_cast<intptr_t>(AsTagged(t)->at(0)) == tag; - } - static bool IsBitset(Type* t) { return reinterpret_cast<intptr_t>(t) & 1; } - static bool IsClass(Type* t) { return IsTagged(t, 0); } - static bool IsConstant(Type* t) { return IsTagged(t, 1); } - static bool IsUnion(Type* t) { return IsTagged(t, 2); } - - static ZoneList<void*>* AsTagged(Type* t) { - return reinterpret_cast<ZoneList<void*>*>(t); - } - static int AsBitset(Type* t) { - return static_cast<int>(reinterpret_cast<intptr_t>(t) >> 1); - } - static Map* AsClass(Type* t) { - return *reinterpret_cast<Map**>(AsTagged(t)->at(2)); - } - static Object* AsConstant(Type* t) { - return *reinterpret_cast<Object**>(AsTagged(t)->at(2)); - } - static ZoneList<Type*>* AsUnion(Type* t) { - return reinterpret_cast<ZoneList<Type*>*>(AsTagged(t)); - } - - static Zone* ToRegion(Zone* zone, Isolate* isolate) { return zone; } -}; - - -struct HeapRep { - static bool IsBitset(Handle<HeapType> t) { return t->IsSmi(); } - static bool IsClass(Handle<HeapType> t) { return t->IsMap(); } - static bool IsConstant(Handle<HeapType> t) { return t->IsBox(); } - static bool IsUnion(Handle<HeapType> t) { return t->IsFixedArray(); } - - static int AsBitset(Handle<HeapType> t) { return Smi::cast(*t)->value(); } - static Map* AsClass(Handle<HeapType> t) { return Map::cast(*t); } - static Object* AsConstant(Handle<HeapType> t) { - return Box::cast(*t)->value(); - } - static FixedArray* AsUnion(Handle<HeapType> t) { - return FixedArray::cast(*t); - } - - static Isolate* ToRegion(Zone* zone, Isolate* isolate) { return isolate; } + RandomNumberGenerator rng_; }; template<class Type, class TypeHandle, class Region, class Rep> struct Tests : Rep { + typedef Types<Type, TypeHandle, Region> TypesInstance; + typedef typename TypesInstance::TypeVector::iterator TypeIterator; + typedef typename TypesInstance::MapVector::iterator MapIterator; + typedef typename TypesInstance::ValueVector::iterator ValueIterator; + Isolate* isolate; HandleScope scope; Zone zone; - Types<Type, TypeHandle, Region> T; + TypesInstance T; Tests() : isolate(CcTest::i_isolate()), @@ -240,24 +326,27 @@ struct Tests : Rep { T(Rep::ToRegion(&zone, isolate), isolate) { } + bool Equal(TypeHandle type1, TypeHandle type2) { + return + type1->Is(type2) && type2->Is(type1) && + Rep::IsBitset(type1) == Rep::IsBitset(type2) && + Rep::IsClass(type1) == Rep::IsClass(type2) && + Rep::IsConstant(type1) == Rep::IsConstant(type2) && + Rep::IsUnion(type1) == Rep::IsUnion(type2) && + type1->NumClasses() == type2->NumClasses() && + type1->NumConstants() == type2->NumConstants() && + (!Rep::IsBitset(type1) || + Rep::AsBitset(type1) == Rep::AsBitset(type2)) && + (!Rep::IsClass(type1) || + Rep::AsClass(type1) == Rep::AsClass(type2)) && + (!Rep::IsConstant(type1) || + Rep::AsConstant(type1) == Rep::AsConstant(type2)) && + (!Rep::IsUnion(type1) || + Rep::Length(Rep::AsUnion(type1)) == Rep::Length(Rep::AsUnion(type2))); + } + void CheckEqual(TypeHandle type1, TypeHandle type2) { - CHECK_EQ(Rep::IsBitset(type1), Rep::IsBitset(type2)); - CHECK_EQ(Rep::IsClass(type1), Rep::IsClass(type2)); - CHECK_EQ(Rep::IsConstant(type1), Rep::IsConstant(type2)); - CHECK_EQ(Rep::IsUnion(type1), Rep::IsUnion(type2)); - CHECK_EQ(type1->NumClasses(), type2->NumClasses()); - CHECK_EQ(type1->NumConstants(), type2->NumConstants()); - if (Rep::IsBitset(type1)) { - CHECK_EQ(Rep::AsBitset(type1), 
Rep::AsBitset(type2)); - } else if (Rep::IsClass(type1)) { - CHECK_EQ(Rep::AsClass(type1), Rep::AsClass(type2)); - } else if (Rep::IsConstant(type1)) { - CHECK_EQ(Rep::AsConstant(type1), Rep::AsConstant(type2)); - } else if (Rep::IsUnion(type1)) { - CHECK_EQ(Rep::AsUnion(type1)->length(), Rep::AsUnion(type2)->length()); - } - CHECK(type1->Is(type2)); - CHECK(type2->Is(type1)); + CHECK(Equal(type1, type2)); } void CheckSub(TypeHandle type1, TypeHandle type2) { @@ -297,69 +386,390 @@ struct Tests : Rep { } void Bitset() { + // None and Any are bitsets. CHECK(this->IsBitset(T.None)); CHECK(this->IsBitset(T.Any)); - CHECK(this->IsBitset(T.String)); - CHECK(this->IsBitset(T.Object)); - - CHECK(this->IsBitset(T.Union(T.String, T.Number))); - CHECK(this->IsBitset(T.Union(T.String, T.Receiver))); CHECK_EQ(0, this->AsBitset(T.None)); - CHECK_EQ( - this->AsBitset(T.Number) | this->AsBitset(T.String), - this->AsBitset(T.Union(T.String, T.Number))); - CHECK_EQ( - this->AsBitset(T.Receiver), - this->AsBitset(T.Union(T.Receiver, T.Object))); + CHECK_EQ(-1, this->AsBitset(T.Any)); + + // Union(T1, T2) is bitset for bitsets T1,T2 + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle union12 = T.Union(type1, type2); + CHECK(!(this->IsBitset(type1) && this->IsBitset(type2)) || + this->IsBitset(union12)); + } + } + + // Intersect(T1, T2) is bitset for bitsets T1,T2 + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle intersect12 = T.Intersect(type1, type2); + CHECK(!(this->IsBitset(type1) && this->IsBitset(type2)) || + this->IsBitset(intersect12)); + } + } + + // Union(T1, T2) is bitset if T2 is bitset and T1->Is(T2) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle union12 = T.Union(type1, type2); + CHECK(!(this->IsBitset(type2) && type1->Is(type2)) || + this->IsBitset(union12)); + } + } + + // Union(T1, T2) is bitwise disjunction for bitsets T1,T2 + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle union12 = T.Union(type1, type2); + if (this->IsBitset(type1) && this->IsBitset(type2)) { + CHECK_EQ( + this->AsBitset(type1) | this->AsBitset(type2), + this->AsBitset(union12)); + } + } + } + + // Intersect(T1, T2) is bitwise conjunction for bitsets T1,T2 + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle intersect12 = T.Intersect(type1, type2); + if (this->IsBitset(type1) && this->IsBitset(type2)) { + CHECK_EQ( + this->AsBitset(type1) & this->AsBitset(type2), + this->AsBitset(intersect12)); + } + } + } } void Class() { - CHECK(this->IsClass(T.ObjectClass)); - CHECK(this->IsClass(T.ArrayClass)); + // Constructor + for (MapIterator mt = T.maps.begin(); mt != T.maps.end(); ++mt) { + Handle<i::Map> map = *mt; + TypeHandle type = T.Class(map); + CHECK(this->IsClass(type)); + } + + // Map attribute + for (MapIterator 
mt = T.maps.begin(); mt != T.maps.end(); ++mt) { + Handle<i::Map> map = *mt; + TypeHandle type = T.Class(map); + CHECK(*map == *type->AsClass()->Map()); + } - CHECK(*T.object_map == this->AsClass(T.ObjectClass)); - CHECK(*T.array_map == this->AsClass(T.ArrayClass)); + // Functionality & Injectivity: Class(M1) = Class(M2) iff M1 = M2 + for (MapIterator mt1 = T.maps.begin(); mt1 != T.maps.end(); ++mt1) { + for (MapIterator mt2 = T.maps.begin(); mt2 != T.maps.end(); ++mt2) { + Handle<i::Map> map1 = *mt1; + Handle<i::Map> map2 = *mt2; + TypeHandle type1 = T.Class(map1); + TypeHandle type2 = T.Class(map2); + CHECK(Equal(type1, type2) == (*map1 == *map2)); + } + } } void Constant() { - CHECK(this->IsConstant(T.SmiConstant)); - CHECK(this->IsConstant(T.ObjectConstant1)); - CHECK(this->IsConstant(T.ObjectConstant2)); - CHECK(this->IsConstant(T.ArrayConstant1)); - CHECK(this->IsConstant(T.ArrayConstant2)); + // Constructor + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Object> value = *vt; + TypeHandle type = T.Constant(value); + CHECK(this->IsConstant(type)); + } + + // Value attribute + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Object> value = *vt; + TypeHandle type = T.Constant(value); + CHECK(*value == *type->AsConstant()->Value()); + } + + // Functionality & Injectivity: Constant(V1) = Constant(V2) iff V1 = V2 + for (ValueIterator vt1 = T.values.begin(); vt1 != T.values.end(); ++vt1) { + for (ValueIterator vt2 = T.values.begin(); vt2 != T.values.end(); ++vt2) { + Handle<i::Object> value1 = *vt1; + Handle<i::Object> value2 = *vt2; + TypeHandle type1 = T.Constant(value1); + TypeHandle type2 = T.Constant(value2); + CHECK(Equal(type1, type2) == (*value1 == *value2)); + } + } + } + + void Array() { + // Constructor + for (int i = 0; i < 20; ++i) { + TypeHandle type = T.Random(); + TypeHandle array = T.Array1(type); + CHECK(this->IsArray(array)); + } + + // Attributes + for (int i = 0; i < 20; ++i) { + TypeHandle type = T.Random(); + TypeHandle array = T.Array1(type); + CheckEqual(type, array->AsArray()->Element()); + } + + // Functionality & Injectivity: Array(T1) = Array(T2) iff T1 = T2 + for (int i = 0; i < 20; ++i) { + for (int j = 0; j < 20; ++j) { + TypeHandle type1 = T.Random(); + TypeHandle type2 = T.Random(); + TypeHandle array1 = T.Array1(type1); + TypeHandle array2 = T.Array1(type2); + CHECK(Equal(array1, array2) == Equal(type1, type2)); + } + } + } + + void Function() { + // Constructors + for (int i = 0; i < 20; ++i) { + for (int j = 0; j < 20; ++j) { + for (int k = 0; k < 20; ++k) { + TypeHandle type1 = T.Random(); + TypeHandle type2 = T.Random(); + TypeHandle type3 = T.Random(); + TypeHandle function0 = T.Function0(type1, type2); + TypeHandle function1 = T.Function1(type1, type2, type3); + TypeHandle function2 = T.Function2(type1, type2, type3); + CHECK(function0->IsFunction()); + CHECK(function1->IsFunction()); + CHECK(function2->IsFunction()); + } + } + } + + // Attributes + for (int i = 0; i < 20; ++i) { + for (int j = 0; j < 20; ++j) { + for (int k = 0; k < 20; ++k) { + TypeHandle type1 = T.Random(); + TypeHandle type2 = T.Random(); + TypeHandle type3 = T.Random(); + TypeHandle function0 = T.Function0(type1, type2); + TypeHandle function1 = T.Function1(type1, type2, type3); + TypeHandle function2 = T.Function2(type1, type2, type3); + CHECK_EQ(0, function0->AsFunction()->Arity()); + CHECK_EQ(1, function1->AsFunction()->Arity()); + CHECK_EQ(2, function2->AsFunction()->Arity()); + CheckEqual(type1, 
function0->AsFunction()->Result()); + CheckEqual(type1, function1->AsFunction()->Result()); + CheckEqual(type1, function2->AsFunction()->Result()); + CheckEqual(type2, function0->AsFunction()->Receiver()); + CheckEqual(type2, function1->AsFunction()->Receiver()); + CheckEqual(T.Any, function2->AsFunction()->Receiver()); + CheckEqual(type3, function1->AsFunction()->Parameter(0)); + CheckEqual(type2, function2->AsFunction()->Parameter(0)); + CheckEqual(type3, function2->AsFunction()->Parameter(1)); + } + } + } + + // Functionality & Injectivity: Function(Ts1) = Function(Ts2) iff Ts1 = Ts2 + for (int i = 0; i < 20; ++i) { + for (int j = 0; j < 20; ++j) { + for (int k = 0; k < 20; ++k) { + TypeHandle type1 = T.Random(); + TypeHandle type2 = T.Random(); + TypeHandle type3 = T.Random(); + TypeHandle function01 = T.Function0(type1, type2); + TypeHandle function02 = T.Function0(type1, type3); + TypeHandle function03 = T.Function0(type3, type2); + TypeHandle function11 = T.Function1(type1, type2, type2); + TypeHandle function12 = T.Function1(type1, type2, type3); + TypeHandle function21 = T.Function2(type1, type2, type2); + TypeHandle function22 = T.Function2(type1, type2, type3); + TypeHandle function23 = T.Function2(type1, type3, type2); + CHECK(Equal(function01, function02) == Equal(type2, type3)); + CHECK(Equal(function01, function03) == Equal(type1, type3)); + CHECK(Equal(function11, function12) == Equal(type2, type3)); + CHECK(Equal(function21, function22) == Equal(type2, type3)); + CHECK(Equal(function21, function23) == Equal(type2, type3)); + } + } + } + } - CHECK(*T.smi == this->AsConstant(T.SmiConstant)); - CHECK(*T.object1 == this->AsConstant(T.ObjectConstant1)); - CHECK(*T.object2 == this->AsConstant(T.ObjectConstant2)); - CHECK(*T.object1 != this->AsConstant(T.ObjectConstant2)); - CHECK(*T.array == this->AsConstant(T.ArrayConstant1)); - CHECK(*T.array == this->AsConstant(T.ArrayConstant2)); + void Of() { + // Constant(V)->Is(Of(V)) + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Object> value = *vt; + TypeHandle const_type = T.Constant(value); + TypeHandle of_type = T.Of(value); + CHECK(const_type->Is(of_type)); + } + + // Constant(V)->Is(T) iff Of(V)->Is(T) or T->Maybe(Constant(V)) + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + Handle<i::Object> value = *vt; + TypeHandle type = *it; + TypeHandle const_type = T.Constant(value); + TypeHandle of_type = T.Of(value); + CHECK(const_type->Is(type) == + (of_type->Is(type) || type->Maybe(const_type))); + } + } + } + + void NowOf() { + // Constant(V)->NowIs(NowOf(V)) + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Object> value = *vt; + TypeHandle const_type = T.Constant(value); + TypeHandle nowof_type = T.NowOf(value); + CHECK(const_type->NowIs(nowof_type)); + } + + // NowOf(V)->Is(Of(V)) + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Object> value = *vt; + TypeHandle nowof_type = T.NowOf(value); + TypeHandle of_type = T.Of(value); + CHECK(nowof_type->Is(of_type)); + } + + // Constant(V)->NowIs(T) iff NowOf(V)->NowIs(T) or T->Maybe(Constant(V)) + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + Handle<i::Object> value = *vt; + TypeHandle type = *it; + TypeHandle const_type = T.Constant(value); + TypeHandle nowof_type = T.NowOf(value); + 
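ZoneRep above decodes the zone representation of a bitset type straight out of the pointer: bit 0 is the tag, the payload sits in the remaining bits, so IsBitset tests the low bit and AsBitset shifts it away, and the Bitset() checks can assert that union and intersection of bitsets are plain | and & on the payloads. A standalone sketch of that low-bit tagging; Type here is an opaque toy and the bit assignments are made up:

// Low-bit pointer tagging, as decoded by ZoneRep::IsBitset/AsBitset above.
#include <cassert>
#include <cstdint>
#include <cstdio>

struct Type;  // opaque; only used as a pointer type, never dereferenced

Type* FromBitset(int bits) {
  return reinterpret_cast<Type*>((static_cast<intptr_t>(bits) << 1) | 1);
}
bool IsBitset(Type* t) { return reinterpret_cast<intptr_t>(t) & 1; }
int AsBitset(Type* t) {
  return static_cast<int>(reinterpret_cast<intptr_t>(t) >> 1);
}

int main() {
  const int kNumber = 1 << 3, kString = 1 << 5;   // made-up bit assignments
  Type* number = FromBitset(kNumber);
  Type* string = FromBitset(kString);
  assert(IsBitset(number) && IsBitset(string));
  Type* union_type = FromBitset(AsBitset(number) | AsBitset(string));
  Type* intersect_type = FromBitset(AsBitset(number) & AsBitset(string));
  std::printf("union=%d intersect=%d\n",
              AsBitset(union_type), AsBitset(intersect_type));  // 40, 0
}
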
CHECK(const_type->NowIs(type) == + (nowof_type->NowIs(type) || type->Maybe(const_type))); + } + } + + // Constant(V)->Is(T) implies NowOf(V)->Is(T) or T->Maybe(Constant(V)) + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + Handle<i::Object> value = *vt; + TypeHandle type = *it; + TypeHandle const_type = T.Constant(value); + TypeHandle nowof_type = T.NowOf(value); + CHECK(!const_type->Is(type) || + (nowof_type->Is(type) || type->Maybe(const_type))); + } + } } void Is() { - // Reflexivity - CHECK(T.None->Is(T.None)); - CHECK(T.Any->Is(T.Any)); - CHECK(T.Object->Is(T.Object)); - - CHECK(T.ObjectClass->Is(T.ObjectClass)); - CHECK(T.ObjectConstant1->Is(T.ObjectConstant1)); - CHECK(T.ArrayConstant1->Is(T.ArrayConstant2)); - - // Symmetry and Transitivity - CheckSub(T.None, T.Number); - CheckSub(T.None, T.Any); - - CheckSub(T.Oddball, T.Any); - CheckSub(T.Boolean, T.Oddball); - CheckSub(T.Null, T.Oddball); - CheckSub(T.Undefined, T.Oddball); + // Least Element (Bottom): None->Is(T) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + CHECK(T.None->Is(type)); + } + + // Greatest Element (Top): T->Is(Any) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + CHECK(type->Is(T.Any)); + } + + // Bottom Uniqueness: T->Is(None) implies T = None + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + if (type->Is(T.None)) CheckEqual(type, T.None); + } + + // Top Uniqueness: Any->Is(T) implies T = Any + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + if (T.Any->Is(type)) CheckEqual(type, T.Any); + } + + // Reflexivity: T->Is(T) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + CHECK(type->Is(type)); + } + + // Transitivity: T1->Is(T2) and T2->Is(T3) implies T1->Is(T3) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + for (TypeIterator it3 = T.types.begin(); it3 != T.types.end(); ++it3) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle type3 = *it3; + CHECK(!(type1->Is(type2) && type2->Is(type3)) || type1->Is(type3)); + } + } + } + + // Antisymmetry: T1->Is(T2) and T2->Is(T1) iff T1 = T2 + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + CHECK((type1->Is(type2) && type2->Is(type1)) == Equal(type1, type2)); + } + } + + // Constant(V1)->Is(Constant(V2)) iff V1 = V2 + for (ValueIterator vt1 = T.values.begin(); vt1 != T.values.end(); ++vt1) { + for (ValueIterator vt2 = T.values.begin(); vt2 != T.values.end(); ++vt2) { + Handle<i::Object> value1 = *vt1; + Handle<i::Object> value2 = *vt2; + TypeHandle const_type1 = T.Constant(value1); + TypeHandle const_type2 = T.Constant(value2); + CHECK(const_type1->Is(const_type2) == (*value1 == *value2)); + } + } + + // Class(M1)->Is(Class(M2)) iff M1 = M2 + for (MapIterator mt1 = T.maps.begin(); mt1 != T.maps.end(); ++mt1) { + for (MapIterator mt2 = T.maps.begin(); mt2 != T.maps.end(); ++mt2) { + Handle<i::Map> map1 = *mt1; + Handle<i::Map> map2 = *mt2; + TypeHandle class_type1 = T.Class(map1); + TypeHandle class_type2 = T.Class(map2); + CHECK(class_type1->Is(class_type2) == (*map1 == 
*map2)); + } + } + + // Constant(V)->Is(Class(M)) never + for (MapIterator mt = T.maps.begin(); mt != T.maps.end(); ++mt) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Map> map = *mt; + Handle<i::Object> value = *vt; + TypeHandle constant_type = T.Constant(value); + TypeHandle class_type = T.Class(map); + CHECK(!constant_type->Is(class_type)); + } + } + + // Class(M)->Is(Constant(V)) never + for (MapIterator mt = T.maps.begin(); mt != T.maps.end(); ++mt) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Map> map = *mt; + Handle<i::Object> value = *vt; + TypeHandle constant_type = T.Constant(value); + TypeHandle class_type = T.Class(map); + CHECK(!class_type->Is(constant_type)); + } + } + + // Basic types CheckUnordered(T.Boolean, T.Null); CheckUnordered(T.Undefined, T.Null); CheckUnordered(T.Boolean, T.Undefined); - CheckSub(T.Number, T.Any); CheckSub(T.SignedSmall, T.Number); CheckSub(T.Signed32, T.Number); CheckSub(T.Float, T.Number); @@ -367,8 +777,6 @@ struct Tests : Rep { CheckUnordered(T.SignedSmall, T.Float); CheckUnordered(T.Signed32, T.Float); - CheckSub(T.Name, T.Any); - CheckSub(T.UniqueName, T.Any); CheckSub(T.UniqueName, T.Name); CheckSub(T.String, T.Name); CheckSub(T.InternalizedString, T.String); @@ -380,8 +788,6 @@ struct Tests : Rep { CheckUnordered(T.String, T.Symbol); CheckUnordered(T.InternalizedString, T.Symbol); - CheckSub(T.Receiver, T.Any); - CheckSub(T.Object, T.Any); CheckSub(T.Object, T.Receiver); CheckSub(T.Array, T.Object); CheckSub(T.Function, T.Object); @@ -389,52 +795,328 @@ struct Tests : Rep { CheckUnordered(T.Object, T.Proxy); CheckUnordered(T.Array, T.Function); - // Structured subtyping - CheckSub(T.None, T.ObjectClass); - CheckSub(T.None, T.ObjectConstant1); - CheckSub(T.ObjectClass, T.Any); - CheckSub(T.ObjectConstant1, T.Any); - + // Structural types CheckSub(T.ObjectClass, T.Object); CheckSub(T.ArrayClass, T.Object); + CheckSub(T.ArrayClass, T.Array); + CheckSub(T.UninitializedClass, T.Internal); CheckUnordered(T.ObjectClass, T.ArrayClass); + CheckUnordered(T.UninitializedClass, T.Null); + CheckUnordered(T.UninitializedClass, T.Undefined); CheckSub(T.SmiConstant, T.SignedSmall); CheckSub(T.SmiConstant, T.Signed32); CheckSub(T.SmiConstant, T.Number); CheckSub(T.ObjectConstant1, T.Object); CheckSub(T.ObjectConstant2, T.Object); - CheckSub(T.ArrayConstant1, T.Object); - CheckSub(T.ArrayConstant1, T.Array); + CheckSub(T.ArrayConstant, T.Object); + CheckSub(T.ArrayConstant, T.Array); + CheckSub(T.UninitializedConstant, T.Internal); CheckUnordered(T.ObjectConstant1, T.ObjectConstant2); - CheckUnordered(T.ObjectConstant1, T.ArrayConstant1); + CheckUnordered(T.ObjectConstant1, T.ArrayConstant); + CheckUnordered(T.UninitializedConstant, T.Null); + CheckUnordered(T.UninitializedConstant, T.Undefined); CheckUnordered(T.ObjectConstant1, T.ObjectClass); CheckUnordered(T.ObjectConstant2, T.ObjectClass); CheckUnordered(T.ObjectConstant1, T.ArrayClass); CheckUnordered(T.ObjectConstant2, T.ArrayClass); - CheckUnordered(T.ArrayConstant1, T.ObjectClass); + CheckUnordered(T.ArrayConstant, T.ObjectClass); + + CheckSub(T.FloatArray, T.Array); + CheckSub(T.FloatArray, T.Object); + CheckUnordered(T.StringArray, T.AnyArray); + + CheckSub(T.MethodFunction, T.Function); + CheckSub(T.NumberFunction1, T.Object); + CheckUnordered(T.SignedFunction1, T.NumberFunction1); + CheckUnordered(T.NumberFunction1, T.NumberFunction2); + } + + void NowIs() { + // Least Element (Bottom): None->NowIs(T) + for 
(TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + CHECK(T.None->NowIs(type)); + } + + // Greatest Element (Top): T->NowIs(Any) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + CHECK(type->NowIs(T.Any)); + } + + // Bottom Uniqueness: T->NowIs(None) implies T = None + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + if (type->NowIs(T.None)) CheckEqual(type, T.None); + } + + // Top Uniqueness: Any->NowIs(T) implies T = Any + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + if (T.Any->NowIs(type)) CheckEqual(type, T.Any); + } + + // Reflexivity: T->NowIs(T) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + CHECK(type->NowIs(type)); + } + + // Transitivity: T1->NowIs(T2) and T2->NowIs(T3) implies T1->NowIs(T3) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + for (TypeIterator it3 = T.types.begin(); it3 != T.types.end(); ++it3) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle type3 = *it3; + CHECK(!(type1->NowIs(type2) && type2->NowIs(type3)) || + type1->NowIs(type3)); + } + } + } + + // Antisymmetry: T1->NowIs(T2) and T2->NowIs(T1) iff T1 = T2 + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + CHECK((type1->NowIs(type2) && type2->NowIs(type1)) == + Equal(type1, type2)); + } + } + + // T1->Is(T2) implies T1->NowIs(T2) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + CHECK(!type1->Is(type2) || type1->NowIs(type2)); + } + } + + // Constant(V1)->NowIs(Constant(V2)) iff V1 = V2 + for (ValueIterator vt1 = T.values.begin(); vt1 != T.values.end(); ++vt1) { + for (ValueIterator vt2 = T.values.begin(); vt2 != T.values.end(); ++vt2) { + Handle<i::Object> value1 = *vt1; + Handle<i::Object> value2 = *vt2; + TypeHandle const_type1 = T.Constant(value1); + TypeHandle const_type2 = T.Constant(value2); + CHECK(const_type1->NowIs(const_type2) == (*value1 == *value2)); + } + } + + // Class(M1)->NowIs(Class(M2)) iff M1 = M2 + for (MapIterator mt1 = T.maps.begin(); mt1 != T.maps.end(); ++mt1) { + for (MapIterator mt2 = T.maps.begin(); mt2 != T.maps.end(); ++mt2) { + Handle<i::Map> map1 = *mt1; + Handle<i::Map> map2 = *mt2; + TypeHandle class_type1 = T.Class(map1); + TypeHandle class_type2 = T.Class(map2); + CHECK(class_type1->NowIs(class_type2) == (*map1 == *map2)); + } + } + + // Constant(V)->NowIs(Class(M)) iff V has map M + for (MapIterator mt = T.maps.begin(); mt != T.maps.end(); ++mt) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Map> map = *mt; + Handle<i::Object> value = *vt; + TypeHandle const_type = T.Constant(value); + TypeHandle class_type = T.Class(map); + CHECK((value->IsHeapObject() && + i::HeapObject::cast(*value)->map() == *map) + == const_type->NowIs(class_type)); + } + } + + // Class(M)->NowIs(Constant(V)) never + for (MapIterator mt = T.maps.begin(); mt != T.maps.end(); ++mt) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Map> map = *mt; + Handle<i::Object> value = 
*vt; + TypeHandle const_type = T.Constant(value); + TypeHandle class_type = T.Class(map); + CHECK(!class_type->NowIs(const_type)); + } + } + } + + void Contains() { + // T->Contains(V) iff Constant(V)->Is(T) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + TypeHandle type = *it; + Handle<i::Object> value = *vt; + TypeHandle const_type = T.Constant(value); + CHECK(type->Contains(value) == const_type->Is(type)); + } + } + + // Of(V)->Is(T) implies T->Contains(V) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + TypeHandle type = *it; + Handle<i::Object> value = *vt; + TypeHandle of_type = T.Of(value); + CHECK(!of_type->Is(type) || type->Contains(value)); + } + } + } + + void NowContains() { + // T->NowContains(V) iff Constant(V)->NowIs(T) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + TypeHandle type = *it; + Handle<i::Object> value = *vt; + TypeHandle const_type = T.Constant(value); + CHECK(type->NowContains(value) == const_type->NowIs(type)); + } + } + + // T->Contains(V) implies T->NowContains(V) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + TypeHandle type = *it; + Handle<i::Object> value = *vt; + CHECK(!type->Contains(value) || type->NowContains(value)); + } + } + + // NowOf(V)->Is(T) implies T->NowContains(V) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + TypeHandle type = *it; + Handle<i::Object> value = *vt; + TypeHandle nowof_type = T.Of(value); + CHECK(!nowof_type->NowIs(type) || type->NowContains(value)); + } + } + + // NowOf(V)->NowIs(T) implies T->NowContains(V) + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + TypeHandle type = *it; + Handle<i::Object> value = *vt; + TypeHandle nowof_type = T.Of(value); + CHECK(!nowof_type->NowIs(type) || type->NowContains(value)); + } + } } void Maybe() { - CheckOverlap(T.Any, T.Any, T.Semantic); - CheckOverlap(T.Object, T.Object, T.Semantic); + // T->Maybe(Any) iff T inhabited + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + CHECK(type->Maybe(T.Any) == type->IsInhabited()); + } + + // T->Maybe(None) never + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + CHECK(!type->Maybe(T.None)); + } + + // Reflexivity upto Inhabitation: T->Maybe(T) iff T inhabited + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + CHECK(type->Maybe(type) == type->IsInhabited()); + } + + // Symmetry: T1->Maybe(T2) iff T2->Maybe(T1) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + CHECK(type1->Maybe(type2) == type2->Maybe(type1)); + } + } + + // T1->Maybe(T2) implies T1, T2 inhabited + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + 
CHECK(!type1->Maybe(type2) || + (type1->IsInhabited() && type2->IsInhabited())); + } + } + + // T1->Maybe(T2) iff Intersect(T1, T2) inhabited + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle intersect12 = T.Intersect(type1, type2); + CHECK(type1->Maybe(type2) == intersect12->IsInhabited()); + } + } + + // T1->Is(T2) and T1 inhabited implies T1->Maybe(T2) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + CHECK(!(type1->Is(type2) && type1->IsInhabited()) || + type1->Maybe(type2)); + } + } + + // Constant(V1)->Maybe(Constant(V2)) iff V1 = V2 + for (ValueIterator vt1 = T.values.begin(); vt1 != T.values.end(); ++vt1) { + for (ValueIterator vt2 = T.values.begin(); vt2 != T.values.end(); ++vt2) { + Handle<i::Object> value1 = *vt1; + Handle<i::Object> value2 = *vt2; + TypeHandle const_type1 = T.Constant(value1); + TypeHandle const_type2 = T.Constant(value2); + CHECK(const_type1->Maybe(const_type2) == (*value1 == *value2)); + } + } - CheckOverlap(T.Oddball, T.Any, T.Semantic); - CheckOverlap(T.Boolean, T.Oddball, T.Semantic); - CheckOverlap(T.Null, T.Oddball, T.Semantic); - CheckOverlap(T.Undefined, T.Oddball, T.Semantic); + // Class(M1)->Maybe(Class(M2)) iff M1 = M2 + for (MapIterator mt1 = T.maps.begin(); mt1 != T.maps.end(); ++mt1) { + for (MapIterator mt2 = T.maps.begin(); mt2 != T.maps.end(); ++mt2) { + Handle<i::Map> map1 = *mt1; + Handle<i::Map> map2 = *mt2; + TypeHandle class_type1 = T.Class(map1); + TypeHandle class_type2 = T.Class(map2); + CHECK(class_type1->Maybe(class_type2) == (*map1 == *map2)); + } + } + + // Constant(V)->Maybe(Class(M)) never + for (MapIterator mt = T.maps.begin(); mt != T.maps.end(); ++mt) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Map> map = *mt; + Handle<i::Object> value = *vt; + TypeHandle const_type = T.Constant(value); + TypeHandle class_type = T.Class(map); + CHECK(!const_type->Maybe(class_type)); + } + } + + // Class(M)->Maybe(Constant(V)) never + for (MapIterator mt = T.maps.begin(); mt != T.maps.end(); ++mt) { + for (ValueIterator vt = T.values.begin(); vt != T.values.end(); ++vt) { + Handle<i::Map> map = *mt; + Handle<i::Object> value = *vt; + TypeHandle const_type = T.Constant(value); + TypeHandle class_type = T.Class(map); + CHECK(!class_type->Maybe(const_type)); + } + } + + // Basic types CheckDisjoint(T.Boolean, T.Null, T.Semantic); CheckDisjoint(T.Undefined, T.Null, T.Semantic); CheckDisjoint(T.Boolean, T.Undefined, T.Semantic); - CheckOverlap(T.Number, T.Any, T.Semantic); CheckOverlap(T.SignedSmall, T.Number, T.Semantic); CheckOverlap(T.Float, T.Number, T.Semantic); CheckDisjoint(T.Signed32, T.Float, T.Semantic); - CheckOverlap(T.Name, T.Any, T.Semantic); - CheckOverlap(T.UniqueName, T.Any, T.Semantic); CheckOverlap(T.UniqueName, T.Name, T.Semantic); CheckOverlap(T.String, T.Name, T.Semantic); CheckOverlap(T.InternalizedString, T.String, T.Semantic); @@ -446,8 +1128,6 @@ struct Tests : Rep { CheckDisjoint(T.String, T.Symbol, T.Semantic); CheckDisjoint(T.InternalizedString, T.Symbol, T.Semantic); - CheckOverlap(T.Receiver, T.Any, T.Semantic); - CheckOverlap(T.Object, T.Any, T.Semantic); CheckOverlap(T.Object, T.Receiver, T.Semantic); CheckOverlap(T.Array, T.Object, T.Semantic); 
CheckOverlap(T.Function, T.Object, T.Semantic); @@ -455,9 +1135,7 @@ struct Tests : Rep { CheckDisjoint(T.Object, T.Proxy, T.Semantic); CheckDisjoint(T.Array, T.Function, T.Semantic); - CheckOverlap(T.ObjectClass, T.Any, T.Semantic); - CheckOverlap(T.ObjectConstant1, T.Any, T.Semantic); - + // Structural types CheckOverlap(T.ObjectClass, T.Object, T.Semantic); CheckOverlap(T.ArrayClass, T.Object, T.Semantic); CheckOverlap(T.ObjectClass, T.ObjectClass, T.Semantic); @@ -470,82 +1148,182 @@ struct Tests : Rep { CheckDisjoint(T.SmiConstant, T.Float, T.Semantic); CheckOverlap(T.ObjectConstant1, T.Object, T.Semantic); CheckOverlap(T.ObjectConstant2, T.Object, T.Semantic); - CheckOverlap(T.ArrayConstant1, T.Object, T.Semantic); - CheckOverlap(T.ArrayConstant1, T.Array, T.Semantic); - CheckOverlap(T.ArrayConstant1, T.ArrayConstant2, T.Semantic); + CheckOverlap(T.ArrayConstant, T.Object, T.Semantic); + CheckOverlap(T.ArrayConstant, T.Array, T.Semantic); CheckOverlap(T.ObjectConstant1, T.ObjectConstant1, T.Semantic); CheckDisjoint(T.ObjectConstant1, T.ObjectConstant2, T.Semantic); - CheckDisjoint(T.ObjectConstant1, T.ArrayConstant1, T.Semantic); + CheckDisjoint(T.ObjectConstant1, T.ArrayConstant, T.Semantic); CheckDisjoint(T.ObjectConstant1, T.ObjectClass, T.Semantic); CheckDisjoint(T.ObjectConstant2, T.ObjectClass, T.Semantic); CheckDisjoint(T.ObjectConstant1, T.ArrayClass, T.Semantic); CheckDisjoint(T.ObjectConstant2, T.ArrayClass, T.Semantic); - CheckDisjoint(T.ArrayConstant1, T.ObjectClass, T.Semantic); + CheckDisjoint(T.ArrayConstant, T.ObjectClass, T.Semantic); + + CheckOverlap(T.FloatArray, T.Array, T.Semantic); + CheckDisjoint(T.FloatArray, T.AnyArray, T.Semantic); + CheckDisjoint(T.FloatArray, T.StringArray, T.Semantic); + + CheckOverlap(T.MethodFunction, T.Function, T.Semantic); + CheckDisjoint(T.SignedFunction1, T.NumberFunction1, T.Semantic); + CheckDisjoint(T.SignedFunction1, T.NumberFunction2, T.Semantic); + CheckDisjoint(T.NumberFunction1, T.NumberFunction2, T.Semantic); + CheckDisjoint(T.SignedFunction1, T.MethodFunction, T.Semantic); + } + + void Union1() { + // Identity: Union(T, None) = T + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + TypeHandle union_type = T.Union(type, T.None); + CheckEqual(union_type, type); + } + + // Domination: Union(T, Any) = Any + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + TypeHandle union_type = T.Union(type, T.Any); + CheckEqual(union_type, T.Any); + } + + // Idempotence: Union(T, T) = T + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + TypeHandle union_type = T.Union(type, type); + CheckEqual(union_type, type); + } + + // Commutativity: Union(T1, T2) = Union(T2, T1) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle union12 = T.Union(type1, type2); + TypeHandle union21 = T.Union(type2, type1); + CheckEqual(union12, union21); + } + } + + // Associativity: Union(T1, Union(T2, T3)) = Union(Union(T1, T2), T3) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + for (TypeIterator it3 = T.types.begin(); it3 != T.types.end(); ++it3) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle type3 = *it3; + TypeHandle union12 = T.Union(type1, 
type2); + TypeHandle union23 = T.Union(type2, type3); + TypeHandle union1_23 = T.Union(type1, union23); + TypeHandle union12_3 = T.Union(union12, type3); + CheckEqual(union1_23, union12_3); + } + } + } + + // Meet: T1->Is(Union(T1, T2)) and T2->Is(Union(T1, T2)) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle union12 = T.Union(type1, type2); + CHECK(type1->Is(union12)); + CHECK(type2->Is(union12)); + } + } + + // Upper Boundedness: T1->Is(T2) implies Union(T1, T2) = T2 + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle union12 = T.Union(type1, type2); + if (type1->Is(type2)) CheckEqual(union12, type2); + } + } } - void Union() { - // Bitset-bitset - CHECK(this->IsBitset(T.Union(T.Object, T.Number))); - CHECK(this->IsBitset(T.Union(T.Object, T.Object))); - CHECK(this->IsBitset(T.Union(T.Any, T.None))); + void Union2() { + // Monotonicity: T1->Is(T2) implies Union(T1, T3)->Is(Union(T2, T3)) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + for (TypeIterator it3 = T.types.begin(); it3 != T.types.end(); ++it3) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle type3 = *it3; + TypeHandle union13 = T.Union(type1, type3); + TypeHandle union23 = T.Union(type2, type3); + CHECK(!type1->Is(type2) || union13->Is(union23)); + } + } + } - CheckEqual(T.Union(T.None, T.Number), T.Number); - CheckEqual(T.Union(T.Object, T.Proxy), T.Receiver); - CheckEqual(T.Union(T.Number, T.String), T.Union(T.String, T.Number)); - CheckSub(T.Union(T.Number, T.String), T.Any); + // Monotonicity: T1->Is(T3) and T2->Is(T3) implies Union(T1, T2)->Is(T3) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + for (TypeIterator it3 = T.types.begin(); it3 != T.types.end(); ++it3) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle type3 = *it3; + TypeHandle union12 = T.Union(type1, type2); + CHECK(!(type1->Is(type3) && type2->Is(type3)) || union12->Is(type3)); + } + } + } + + // Monotonicity: T1->Is(T2) or T1->Is(T3) implies T1->Is(Union(T2, T3)) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + for (TypeIterator it3 = T.types.begin(); it3 != T.types.end(); ++it3) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle type3 = *it3; + TypeHandle union23 = T.Union(type2, type3); + CHECK(!(type1->Is(type2) || type1->Is(type3)) || type1->Is(union23)); + } + } + } // Class-class - CHECK(this->IsClass(T.Union(T.ObjectClass, T.ObjectClass))); - CHECK(this->IsUnion(T.Union(T.ObjectClass, T.ArrayClass))); - - CheckEqual(T.Union(T.ObjectClass, T.ObjectClass), T.ObjectClass); - CheckSub(T.None, T.Union(T.ObjectClass, T.ArrayClass)); - CheckSub(T.Union(T.ObjectClass, T.ArrayClass), T.Any); - CheckSub(T.ObjectClass, T.Union(T.ObjectClass, T.ArrayClass)); - CheckSub(T.ArrayClass, T.Union(T.ObjectClass, T.ArrayClass)); CheckSub(T.Union(T.ObjectClass, T.ArrayClass), T.Object); CheckUnordered(T.Union(T.ObjectClass, T.ArrayClass), T.Array); CheckOverlap(T.Union(T.ObjectClass, 
T.ArrayClass), T.Array, T.Semantic); CheckDisjoint(T.Union(T.ObjectClass, T.ArrayClass), T.Number, T.Semantic); // Constant-constant - CHECK(this->IsConstant(T.Union(T.ObjectConstant1, T.ObjectConstant1))); - CHECK(this->IsConstant(T.Union(T.ArrayConstant1, T.ArrayConstant1))); - CHECK(this->IsUnion(T.Union(T.ObjectConstant1, T.ObjectConstant2))); - - CheckEqual( - T.Union(T.ObjectConstant1, T.ObjectConstant1), - T.ObjectConstant1); - CheckEqual(T.Union(T.ArrayConstant1, T.ArrayConstant1), T.ArrayConstant1); - CheckEqual(T.Union(T.ArrayConstant1, T.ArrayConstant1), T.ArrayConstant2); - CheckSub(T.None, T.Union(T.ObjectConstant1, T.ObjectConstant2)); - CheckSub(T.Union(T.ObjectConstant1, T.ObjectConstant2), T.Any); - CheckSub(T.ObjectConstant1, T.Union(T.ObjectConstant1, T.ObjectConstant2)); - CheckSub(T.ObjectConstant2, T.Union(T.ObjectConstant1, T.ObjectConstant2)); - CheckSub(T.ArrayConstant2, T.Union(T.ArrayConstant1, T.ObjectConstant2)); CheckSub(T.Union(T.ObjectConstant1, T.ObjectConstant2), T.Object); + CheckUnordered(T.Union(T.ObjectConstant1, T.ArrayConstant), T.Array); CheckUnordered( T.Union(T.ObjectConstant1, T.ObjectConstant2), T.ObjectClass); - CheckUnordered(T.Union(T.ObjectConstant1, T.ArrayConstant1), T.Array); CheckOverlap( - T.Union(T.ObjectConstant1, T.ArrayConstant1), T.Array, T.Semantic); - CheckOverlap( - T.Union(T.ObjectConstant1, T.ArrayConstant1), T.ArrayConstant2, - T.Semantic); + T.Union(T.ObjectConstant1, T.ArrayConstant), T.Array, T.Semantic); CheckDisjoint( - T.Union(T.ObjectConstant1, T.ArrayConstant1), T.Number, T.Semantic); + T.Union(T.ObjectConstant1, T.ArrayConstant), T.Number, T.Semantic); CheckDisjoint( - T.Union(T.ObjectConstant1, T.ArrayConstant1), T.ObjectClass, - T.Semantic); + T.Union(T.ObjectConstant1, T.ArrayConstant), T.ObjectClass, T.Semantic); + + // Bitset-array + CHECK(this->IsBitset(T.Union(T.AnyArray, T.Array))); + CHECK(this->IsUnion(T.Union(T.FloatArray, T.Number))); + + CheckEqual(T.Union(T.AnyArray, T.Array), T.Array); + CheckSub(T.None, T.Union(T.FloatArray, T.Number)); + CheckSub(T.Union(T.FloatArray, T.Number), T.Any); + CheckUnordered(T.Union(T.AnyArray, T.String), T.Array); + CheckOverlap(T.Union(T.FloatArray, T.String), T.Object, T.Semantic); + CheckDisjoint(T.Union(T.FloatArray, T.String), T.Number, T.Semantic); + + // Bitset-function + CHECK(this->IsBitset(T.Union(T.MethodFunction, T.Function))); + CHECK(this->IsUnion(T.Union(T.NumberFunction1, T.Number))); + + CheckEqual(T.Union(T.MethodFunction, T.Function), T.Function); + CheckSub(T.None, T.Union(T.MethodFunction, T.Number)); + CheckSub(T.Union(T.MethodFunction, T.Number), T.Any); + CheckUnordered(T.Union(T.NumberFunction1, T.String), T.Function); + CheckOverlap(T.Union(T.NumberFunction2, T.String), T.Object, T.Semantic); + CheckDisjoint(T.Union(T.NumberFunction1, T.String), T.Number, T.Semantic); // Bitset-class - CHECK(this->IsBitset(T.Union(T.ObjectClass, T.Object))); - CHECK(this->IsUnion(T.Union(T.ObjectClass, T.Number))); - - CheckEqual(T.Union(T.ObjectClass, T.Object), T.Object); - CheckSub(T.None, T.Union(T.ObjectClass, T.Number)); - CheckSub(T.Union(T.ObjectClass, T.Number), T.Any); CheckSub( T.Union(T.ObjectClass, T.SignedSmall), T.Union(T.Object, T.Number)); CheckSub(T.Union(T.ObjectClass, T.Array), T.Object); @@ -554,35 +1332,19 @@ struct Tests : Rep { CheckDisjoint(T.Union(T.ObjectClass, T.String), T.Number, T.Semantic); // Bitset-constant - CHECK(this->IsBitset(T.Union(T.SmiConstant, T.Number))); - CHECK(this->IsBitset(T.Union(T.ObjectConstant1, T.Object))); - 
CHECK(this->IsUnion(T.Union(T.ObjectConstant2, T.Number))); - - CheckEqual(T.Union(T.SmiConstant, T.Number), T.Number); - CheckEqual(T.Union(T.ObjectConstant1, T.Object), T.Object); - CheckSub(T.None, T.Union(T.ObjectConstant1, T.Number)); - CheckSub(T.Union(T.ObjectConstant1, T.Number), T.Any); CheckSub( T.Union(T.ObjectConstant1, T.Signed32), T.Union(T.Object, T.Number)); CheckSub(T.Union(T.ObjectConstant1, T.Array), T.Object); CheckUnordered(T.Union(T.ObjectConstant1, T.String), T.Array); CheckOverlap(T.Union(T.ObjectConstant1, T.String), T.Object, T.Semantic); CheckDisjoint(T.Union(T.ObjectConstant1, T.String), T.Number, T.Semantic); - CheckEqual(T.Union(T.Signed32, T.Signed32Constant), T.Signed32); // Class-constant - CHECK(this->IsUnion(T.Union(T.ObjectConstant1, T.ObjectClass))); - CHECK(this->IsUnion(T.Union(T.ArrayClass, T.ObjectConstant2))); - - CheckSub(T.None, T.Union(T.ObjectConstant1, T.ArrayClass)); - CheckSub(T.Union(T.ObjectConstant1, T.ArrayClass), T.Any); CheckSub(T.Union(T.ObjectConstant1, T.ArrayClass), T.Object); - CheckSub(T.ObjectConstant1, T.Union(T.ObjectConstant1, T.ArrayClass)); - CheckSub(T.ArrayClass, T.Union(T.ObjectConstant1, T.ArrayClass)); CheckUnordered(T.ObjectClass, T.Union(T.ObjectConstant1, T.ArrayClass)); CheckSub( T.Union(T.ObjectConstant1, T.ArrayClass), T.Union(T.Array, T.Object)); - CheckUnordered(T.Union(T.ObjectConstant1, T.ArrayClass), T.ArrayConstant1); + CheckUnordered(T.Union(T.ObjectConstant1, T.ArrayClass), T.ArrayConstant); CheckDisjoint( T.Union(T.ObjectConstant1, T.ArrayClass), T.ObjectConstant2, T.Semantic); @@ -590,48 +1352,14 @@ struct Tests : Rep { T.Union(T.ObjectConstant1, T.ArrayClass), T.ObjectClass, T.Semantic); // Bitset-union - CHECK(this->IsBitset( - T.Union(T.Object, T.Union(T.ObjectConstant1, T.ObjectClass)))); - CHECK(this->IsUnion( - T.Union(T.Union(T.ArrayClass, T.ObjectConstant2), T.Number))); - - CheckEqual( - T.Union(T.Object, T.Union(T.ObjectConstant1, T.ObjectClass)), - T.Object); - CheckEqual( - T.Union(T.Union(T.ArrayClass, T.ObjectConstant1), T.Number), - T.Union(T.ObjectConstant1, T.Union(T.Number, T.ArrayClass))); CheckSub( T.Float, T.Union(T.Union(T.ArrayClass, T.ObjectConstant1), T.Number)); CheckSub( - T.ObjectConstant1, - T.Union(T.Union(T.ArrayClass, T.ObjectConstant1), T.Float)); - CheckSub( - T.None, - T.Union(T.Union(T.ArrayClass, T.ObjectConstant1), T.Float)); - CheckSub( - T.Union(T.Union(T.ArrayClass, T.ObjectConstant1), T.Float), - T.Any); - CheckSub( T.Union(T.Union(T.ArrayClass, T.ObjectConstant1), T.Float), T.Union(T.ObjectConstant1, T.Union(T.Number, T.ArrayClass))); // Class-union - CHECK(this->IsUnion( - T.Union(T.Union(T.ArrayClass, T.ObjectConstant2), T.ArrayClass))); - CHECK(this->IsUnion( - T.Union(T.Union(T.ArrayClass, T.ObjectConstant2), T.ObjectClass))); - - CheckEqual( - T.Union(T.ObjectClass, T.Union(T.ObjectConstant1, T.ObjectClass)), - T.Union(T.ObjectClass, T.ObjectConstant1)); - CheckSub( - T.None, - T.Union(T.ObjectClass, T.Union(T.ObjectConstant1, T.ObjectClass))); - CheckSub( - T.Union(T.ObjectClass, T.Union(T.ObjectConstant1, T.ObjectClass)), - T.Any); CheckSub( T.Union(T.ObjectClass, T.Union(T.ObjectConstant1, T.ObjectClass)), T.Object); @@ -640,31 +1368,29 @@ struct Tests : Rep { T.Union(T.ArrayClass, T.ObjectConstant2)); // Constant-union - CHECK(this->IsUnion(T.Union( - T.ObjectConstant1, T.Union(T.ObjectConstant1, T.ObjectConstant2)))); - CHECK(this->IsUnion(T.Union( - T.Union(T.ArrayConstant1, T.ObjectClass), T.ObjectConstant1))); - CHECK(this->IsUnion(T.Union( - 
T.Union(T.ArrayConstant1, T.ObjectConstant2), T.ObjectConstant1))); - CheckEqual( T.Union( T.ObjectConstant1, T.Union(T.ObjectConstant1, T.ObjectConstant2)), T.Union(T.ObjectConstant2, T.ObjectConstant1)); CheckEqual( T.Union( - T.Union(T.ArrayConstant1, T.ObjectConstant2), T.ObjectConstant1), + T.Union(T.ArrayConstant, T.ObjectConstant2), T.ObjectConstant1), T.Union( - T.ObjectConstant2, T.Union(T.ArrayConstant1, T.ObjectConstant1))); + T.ObjectConstant2, T.Union(T.ArrayConstant, T.ObjectConstant1))); - // Union-union - CHECK(this->IsBitset(T.Union( - T.Union(T.Number, T.ArrayClass), - T.Union(T.Signed32, T.Array)))); - CHECK(this->IsUnion(T.Union( - T.Union(T.Number, T.ArrayClass), - T.Union(T.ObjectClass, T.ArrayClass)))); + // Array-union + CheckEqual( + T.Union(T.AnyArray, T.Union(T.FloatArray, T.AnyArray)), + T.Union(T.AnyArray, T.FloatArray)); + CheckSub(T.Union(T.AnyArray, T.FloatArray), T.Array); + // Function-union + CheckEqual( + T.Union(T.NumberFunction1, T.NumberFunction2), + T.Union(T.NumberFunction2, T.NumberFunction1)); + CheckSub(T.Union(T.SignedFunction1, T.MethodFunction), T.Function); + + // Union-union CheckEqual( T.Union( T.Union(T.ObjectConstant2, T.ObjectConstant1), @@ -672,92 +1398,174 @@ struct Tests : Rep { T.Union(T.ObjectConstant2, T.ObjectConstant1)); CheckEqual( T.Union( - T.Union(T.ObjectConstant2, T.ArrayConstant1), - T.Union(T.ObjectConstant1, T.ArrayConstant2)), - T.Union( - T.Union(T.ObjectConstant1, T.ObjectConstant2), - T.ArrayConstant1)); - CheckEqual( - T.Union( T.Union(T.Number, T.ArrayClass), T.Union(T.SignedSmall, T.Array)), T.Union(T.Number, T.Array)); } - void Intersect() { - // Bitset-bitset - CHECK(this->IsBitset(T.Intersect(T.Object, T.Number))); - CHECK(this->IsBitset(T.Intersect(T.Object, T.Object))); - CHECK(this->IsBitset(T.Intersect(T.Any, T.None))); + void Intersect1() { + // Identity: Intersect(T, Any) = T + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + TypeHandle intersect_type = T.Intersect(type, T.Any); + CheckEqual(intersect_type, type); + } - CheckEqual(T.Intersect(T.None, T.Number), T.None); - CheckSub(T.Intersect(T.Object, T.Proxy), T.Representation); - CheckEqual(T.Intersect(T.Name, T.String), T.Intersect(T.String, T.Name)); - CheckEqual(T.Intersect(T.UniqueName, T.String), T.InternalizedString); + // Domination: Intersect(T, None) = None + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + TypeHandle intersect_type = T.Intersect(type, T.None); + CheckEqual(intersect_type, T.None); + } - // Class-class - CHECK(this->IsClass(T.Intersect(T.ObjectClass, T.ObjectClass))); - CHECK(this->IsBitset(T.Intersect(T.ObjectClass, T.ArrayClass))); + // Idempotence: Intersect(T, T) = T + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type = *it; + TypeHandle intersect_type = T.Intersect(type, type); + CheckEqual(intersect_type, type); + } + + // Commutativity: Intersect(T1, T2) = Intersect(T2, T1) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle intersect12 = T.Intersect(type1, type2); + TypeHandle intersect21 = T.Intersect(type2, type1); + CheckEqual(intersect12, intersect21); + } + } - CheckEqual(T.Intersect(T.ObjectClass, T.ObjectClass), T.ObjectClass); - CheckEqual(T.Intersect(T.ObjectClass, T.ArrayClass), T.None); + // Associativity: + // 
Intersect(T1, Intersect(T2, T3)) = Intersect(Intersect(T1, T2), T3) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + for (TypeIterator it3 = T.types.begin(); it3 != T.types.end(); ++it3) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle type3 = *it3; + TypeHandle intersect12 = T.Intersect(type1, type2); + TypeHandle intersect23 = T.Intersect(type2, type3); + TypeHandle intersect1_23 = T.Intersect(type1, intersect23); + TypeHandle intersect12_3 = T.Intersect(intersect12, type3); + CheckEqual(intersect1_23, intersect12_3); + } + } + } - // Constant-constant - CHECK(this->IsConstant(T.Intersect(T.ObjectConstant1, T.ObjectConstant1))); - CHECK(this->IsConstant(T.Intersect(T.ArrayConstant1, T.ArrayConstant2))); - CHECK(this->IsBitset(T.Intersect(T.ObjectConstant1, T.ObjectConstant2))); + // Join: Intersect(T1, T2)->Is(T1) and Intersect(T1, T2)->Is(T2) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle intersect12 = T.Intersect(type1, type2); + CHECK(intersect12->Is(type1)); + CHECK(intersect12->Is(type2)); + } + } - CheckEqual( - T.Intersect(T.ObjectConstant1, T.ObjectConstant1), T.ObjectConstant1); - CheckEqual( - T.Intersect(T.ArrayConstant1, T.ArrayConstant2), T.ArrayConstant1); - CheckEqual(T.Intersect(T.ObjectConstant1, T.ObjectConstant2), T.None); + // Lower Boundedness: T1->Is(T2) implies Intersect(T1, T2) = T1 + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle intersect12 = T.Intersect(type1, type2); + if (type1->Is(type2)) CheckEqual(intersect12, type1); + } + } + } - // Bitset-class - CHECK(this->IsClass(T.Intersect(T.ObjectClass, T.Object))); - CHECK(this->IsBitset(T.Intersect(T.ObjectClass, T.Number))); + void Intersect2() { + // Monotonicity: T1->Is(T2) implies Intersect(T1, T3)->Is(Intersect(T2, T3)) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + for (TypeIterator it3 = T.types.begin(); it3 != T.types.end(); ++it3) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle type3 = *it3; + TypeHandle intersect13 = T.Intersect(type1, type3); + TypeHandle intersect23 = T.Intersect(type2, type3); + CHECK(!type1->Is(type2) || intersect13->Is(intersect23)); + } + } + } + + // Monotonicity: T1->Is(T3) or T2->Is(T3) implies Intersect(T1, T2)->Is(T3) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + for (TypeIterator it3 = T.types.begin(); it3 != T.types.end(); ++it3) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; + TypeHandle type3 = *it3; + TypeHandle intersect12 = T.Intersect(type1, type2); + CHECK(!(type1->Is(type3) || type2->Is(type3)) || + intersect12->Is(type3)); + } + } + } + + // Monotonicity: T1->Is(T2) and T1->Is(T3) implies T1->Is(Intersect(T2, T3)) + for (TypeIterator it1 = T.types.begin(); it1 != T.types.end(); ++it1) { + for (TypeIterator it2 = T.types.begin(); it2 != T.types.end(); ++it2) { + for (TypeIterator it3 = T.types.begin(); it3 != T.types.end(); ++it3) { + TypeHandle type1 = *it1; + TypeHandle type2 = *it2; 
+ TypeHandle type3 = *it3; + TypeHandle intersect23 = T.Intersect(type2, type3); + CHECK(!(type1->Is(type2) && type1->Is(type3)) || + type1->Is(intersect23)); + } + } + } + // Bitset-class CheckEqual(T.Intersect(T.ObjectClass, T.Object), T.ObjectClass); CheckSub(T.Intersect(T.ObjectClass, T.Array), T.Representation); CheckSub(T.Intersect(T.ObjectClass, T.Number), T.Representation); - // Bitset-constant - CHECK(this->IsBitset(T.Intersect(T.SignedSmall, T.Number))); - CHECK(this->IsConstant(T.Intersect(T.SmiConstant, T.Number))); - CHECK(this->IsConstant(T.Intersect(T.ObjectConstant1, T.Object))); + // Bitset-array + CheckEqual(T.Intersect(T.FloatArray, T.Object), T.FloatArray); + CheckSub(T.Intersect(T.AnyArray, T.Function), T.Representation); - CheckEqual(T.Intersect(T.SignedSmall, T.Number), T.SignedSmall); - CheckEqual(T.Intersect(T.SmiConstant, T.Number), T.SmiConstant); - CheckEqual(T.Intersect(T.ObjectConstant1, T.Object), T.ObjectConstant1); + // Bitset-function + CheckEqual(T.Intersect(T.MethodFunction, T.Object), T.MethodFunction); + CheckSub(T.Intersect(T.NumberFunction1, T.Array), T.Representation); - // Class-constant - CHECK(this->IsBitset(T.Intersect(T.ObjectConstant1, T.ObjectClass))); - CHECK(this->IsBitset(T.Intersect(T.ArrayClass, T.ObjectConstant2))); + // Bitset-union + CheckEqual( + T.Intersect(T.Object, T.Union(T.ObjectConstant1, T.ObjectClass)), + T.Union(T.ObjectConstant1, T.ObjectClass)); + CheckEqual( + T.Intersect(T.Union(T.ArrayClass, T.ObjectConstant1), T.Number), + T.None); + // Class-constant CheckEqual(T.Intersect(T.ObjectConstant1, T.ObjectClass), T.None); CheckEqual(T.Intersect(T.ArrayClass, T.ObjectConstant2), T.None); - // Bitset-union - CHECK(this->IsUnion( - T.Intersect(T.Object, T.Union(T.ObjectConstant1, T.ObjectClass)))); - CHECK(this->IsBitset( - T.Intersect(T.Union(T.ArrayClass, T.ObjectConstant2), T.Number))); + // Array-union + CheckEqual( + T.Intersect(T.FloatArray, T.Union(T.FloatArray, T.ArrayClass)), + T.FloatArray); + CheckEqual( + T.Intersect(T.AnyArray, T.Union(T.Object, T.SmiConstant)), + T.AnyArray); + CheckEqual( + T.Intersect(T.Union(T.AnyArray, T.ArrayConstant), T.FloatArray), + T.None); + // Function-union CheckEqual( - T.Intersect(T.Object, T.Union(T.ObjectConstant1, T.ObjectClass)), - T.Union(T.ObjectConstant1, T.ObjectClass)); + T.Intersect(T.MethodFunction, T.Union(T.String, T.MethodFunction)), + T.MethodFunction); CheckEqual( - T.Intersect(T.Union(T.ArrayClass, T.ObjectConstant1), T.Number), + T.Intersect(T.NumberFunction1, T.Union(T.Object, T.SmiConstant)), + T.NumberFunction1); + CheckEqual( + T.Intersect(T.Union(T.MethodFunction, T.Name), T.NumberFunction2), T.None); // Class-union - CHECK(this->IsClass( - T.Intersect(T.Union(T.ArrayClass, T.ObjectConstant2), T.ArrayClass))); - CHECK(this->IsClass( - T.Intersect(T.Union(T.Object, T.SmiConstant), T.ArrayClass))); - CHECK(this->IsBitset( - T.Intersect(T.Union(T.ObjectClass, T.ArrayConstant1), T.ArrayClass))); - CheckEqual( T.Intersect(T.ArrayClass, T.Union(T.ObjectConstant2, T.ArrayClass)), T.ArrayClass); @@ -765,17 +1573,10 @@ struct Tests : Rep { T.Intersect(T.ArrayClass, T.Union(T.Object, T.SmiConstant)), T.ArrayClass); CheckEqual( - T.Intersect(T.Union(T.ObjectClass, T.ArrayConstant1), T.ArrayClass), + T.Intersect(T.Union(T.ObjectClass, T.ArrayConstant), T.ArrayClass), T.None); // Constant-union - CHECK(this->IsConstant(T.Intersect( - T.ObjectConstant1, T.Union(T.ObjectConstant1, T.ObjectConstant2)))); - CHECK(this->IsConstant(T.Intersect( - T.Union(T.Number, 
T.ObjectClass), T.SmiConstant))); - CHECK(this->IsBitset(T.Intersect( - T.Union(T.ArrayConstant1, T.ObjectClass), T.ObjectConstant1))); - CheckEqual( T.Intersect( T.ObjectConstant1, T.Union(T.ObjectConstant1, T.ObjectConstant2)), @@ -785,15 +1586,10 @@ struct Tests : Rep { T.SmiConstant); CheckEqual( T.Intersect( - T.Union(T.ArrayConstant1, T.ObjectClass), T.ObjectConstant1), + T.Union(T.ArrayConstant, T.ObjectClass), T.ObjectConstant1), T.None); // Union-union - CHECK(this->IsUnion(T.Intersect( - T.Union(T.Number, T.ArrayClass), T.Union(T.Signed32, T.Array)))); - CHECK(this->IsBitset(T.Intersect( - T.Union(T.Number, T.ObjectClass), T.Union(T.Signed32, T.Array)))); - CheckEqual( T.Intersect( T.Union(T.Number, T.ArrayClass), @@ -815,23 +1611,19 @@ struct Tests : Rep { T.Union(T.ObjectConstant2, T.ObjectConstant1), T.ArrayClass), T.Union( T.ObjectConstant1, - T.Union(T.ArrayConstant1, T.ObjectConstant2))), + T.Union(T.ArrayConstant, T.ObjectConstant2))), T.Union(T.ObjectConstant2, T.ObjectConstant1)); - CheckEqual( - T.Intersect( - T.Union(T.ObjectConstant2, T.ArrayConstant1), - T.Union(T.ObjectConstant1, T.ArrayConstant2)), - T.ArrayConstant1); } template<class Type2, class TypeHandle2, class Region2, class Rep2> void Convert() { Types<Type2, TypeHandle2, Region2> T2( Rep2::ToRegion(&zone, isolate), isolate); - for (int i = 0; i < 100; ++i) { - TypeHandle type = T.Fuzz(); - CheckEqual(type, - T.template Convert<Type2>(T2.template Convert<Type>(type))); + for (TypeIterator it = T.types.begin(); it != T.types.end(); ++it) { + TypeHandle type1 = *it; + TypeHandle2 type2 = T2.template Convert<Type>(type1); + TypeHandle type3 = T.template Convert<Type2>(type2); + CheckEqual(type1, type3); } } }; @@ -840,27 +1632,55 @@ typedef Tests<Type, Type*, Zone, ZoneRep> ZoneTests; typedef Tests<HeapType, Handle<HeapType>, Isolate, HeapRep> HeapTests; -TEST(Bitset) { +TEST(BitsetType) { CcTest::InitializeVM(); ZoneTests().Bitset(); HeapTests().Bitset(); } -TEST(Class) { +TEST(ClassType) { CcTest::InitializeVM(); ZoneTests().Class(); HeapTests().Class(); } -TEST(Constant) { +TEST(ConstantType) { CcTest::InitializeVM(); ZoneTests().Constant(); HeapTests().Constant(); } +TEST(ArrayType) { + CcTest::InitializeVM(); + ZoneTests().Array(); + HeapTests().Array(); +} + + +TEST(FunctionType) { + CcTest::InitializeVM(); + ZoneTests().Function(); + HeapTests().Function(); +} + + +TEST(Of) { + CcTest::InitializeVM(); + ZoneTests().Of(); + HeapTests().Of(); +} + + +TEST(NowOf) { + CcTest::InitializeVM(); + ZoneTests().NowOf(); + HeapTests().NowOf(); +} + + TEST(Is) { CcTest::InitializeVM(); ZoneTests().Is(); @@ -868,6 +1688,27 @@ TEST(Is) { } +TEST(NowIs) { + CcTest::InitializeVM(); + ZoneTests().NowIs(); + HeapTests().NowIs(); +} + + +TEST(Contains) { + CcTest::InitializeVM(); + ZoneTests().Contains(); + HeapTests().Contains(); +} + + +TEST(NowContains) { + CcTest::InitializeVM(); + ZoneTests().NowContains(); + HeapTests().NowContains(); +} + + TEST(Maybe) { CcTest::InitializeVM(); ZoneTests().Maybe(); @@ -875,17 +1716,31 @@ TEST(Maybe) { } -TEST(Union) { +TEST(Union1) { + CcTest::InitializeVM(); + ZoneTests().Union1(); + HeapTests().Union1(); +} + + +TEST(Union2) { + CcTest::InitializeVM(); + ZoneTests().Union2(); + HeapTests().Union2(); +} + + +TEST(Intersect1) { CcTest::InitializeVM(); - ZoneTests().Union(); - HeapTests().Union(); + ZoneTests().Intersect1(); + HeapTests().Intersect1(); } -TEST(Intersect) { +TEST(Intersect2) { CcTest::InitializeVM(); - ZoneTests().Intersect(); - HeapTests().Intersect(); + 
ZoneTests().Intersect2(); + HeapTests().Intersect2(); } diff --git a/deps/v8/test/cctest/test-weakmaps.cc b/deps/v8/test/cctest/test-weakmaps.cc index 97eca86f1..14a5e020a 100644 --- a/deps/v8/test/cctest/test-weakmaps.cc +++ b/deps/v8/test/cctest/test-weakmaps.cc @@ -43,14 +43,12 @@ static Isolate* GetIsolateFrom(LocalContext* context) { static Handle<JSWeakMap> AllocateJSWeakMap(Isolate* isolate) { Factory* factory = isolate->factory(); - Heap* heap = isolate->heap(); Handle<Map> map = factory->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize); Handle<JSObject> weakmap_obj = factory->NewJSObjectFromMap(map); Handle<JSWeakMap> weakmap(JSWeakMap::cast(*weakmap_obj)); // Do not use handles for the hash table, it would make entries strong. - Object* table_obj = ObjectHashTable::Allocate(heap, 1)->ToObjectChecked(); - ObjectHashTable* table = ObjectHashTable::cast(table_obj); - weakmap->set_table(table); + Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 1); + weakmap->set_table(*table); weakmap->set_next(Smi::FromInt(0)); return weakmap; } @@ -180,14 +178,15 @@ TEST(Shrinking) { // Test that weak map values on an evacuation candidate which are not reachable // by other paths are correctly recorded in the slots buffer. TEST(Regress2060a) { + if (i::FLAG_never_compact) return; FLAG_always_compact = true; LocalContext context; Isolate* isolate = GetIsolateFrom(&context); Factory* factory = isolate->factory(); Heap* heap = isolate->heap(); HandleScope scope(isolate); - Handle<JSFunction> function = - factory->NewFunction(factory->function_string(), factory->null_value()); + Handle<JSFunction> function = factory->NewFunctionWithPrototype( + factory->function_string(), factory->null_value()); Handle<JSObject> key = factory->NewJSObject(function); Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate); @@ -215,6 +214,7 @@ TEST(Regress2060a) { // Test that weak map keys on an evacuation candidate which are reachable by // other strong paths are correctly recorded in the slots buffer. TEST(Regress2060b) { + if (i::FLAG_never_compact) return; FLAG_always_compact = true; #ifdef VERIFY_HEAP FLAG_verify_heap = true; @@ -225,8 +225,8 @@ TEST(Regress2060b) { Factory* factory = isolate->factory(); Heap* heap = isolate->heap(); HandleScope scope(isolate); - Handle<JSFunction> function = - factory->NewFunction(factory->function_string(), factory->null_value()); + Handle<JSFunction> function = factory->NewFunctionWithPrototype( + factory->function_string(), factory->null_value()); // Start second old-space page so that keys land on evacuation candidate. Page* first_page = heap->old_pointer_space()->anchor()->next_page(); diff --git a/deps/v8/test/cctest/test-weaksets.cc b/deps/v8/test/cctest/test-weaksets.cc index 514b6b239..a3a947897 100644 --- a/deps/v8/test/cctest/test-weaksets.cc +++ b/deps/v8/test/cctest/test-weaksets.cc @@ -43,14 +43,12 @@ static Isolate* GetIsolateFrom(LocalContext* context) { static Handle<JSWeakSet> AllocateJSWeakSet(Isolate* isolate) { Factory* factory = isolate->factory(); - Heap* heap = isolate->heap(); Handle<Map> map = factory->NewMap(JS_WEAK_SET_TYPE, JSWeakSet::kSize); Handle<JSObject> weakset_obj = factory->NewJSObjectFromMap(map); Handle<JSWeakSet> weakset(JSWeakSet::cast(*weakset_obj)); // Do not use handles for the hash table, it would make entries strong. 
- Object* table_obj = ObjectHashTable::Allocate(heap, 1)->ToObjectChecked(); - ObjectHashTable* table = ObjectHashTable::cast(table_obj); - weakset->set_table(table); + Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 1); + weakset->set_table(*table); weakset->set_next(Smi::FromInt(0)); return weakset; } @@ -180,14 +178,15 @@ TEST(WeakSet_Shrinking) { // Test that weak set values on an evacuation candidate which are not reachable // by other paths are correctly recorded in the slots buffer. TEST(WeakSet_Regress2060a) { + if (i::FLAG_never_compact) return; FLAG_always_compact = true; LocalContext context; Isolate* isolate = GetIsolateFrom(&context); Factory* factory = isolate->factory(); Heap* heap = isolate->heap(); HandleScope scope(isolate); - Handle<JSFunction> function = - factory->NewFunction(factory->function_string(), factory->null_value()); + Handle<JSFunction> function = factory->NewFunctionWithPrototype( + factory->function_string(), factory->null_value()); Handle<JSObject> key = factory->NewJSObject(function); Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate); @@ -215,6 +214,7 @@ TEST(WeakSet_Regress2060a) { // Test that weak set keys on an evacuation candidate which are reachable by // other strong paths are correctly recorded in the slots buffer. TEST(WeakSet_Regress2060b) { + if (i::FLAG_never_compact) return; FLAG_always_compact = true; #ifdef VERIFY_HEAP FLAG_verify_heap = true; @@ -225,8 +225,8 @@ TEST(WeakSet_Regress2060b) { Factory* factory = isolate->factory(); Heap* heap = isolate->heap(); HandleScope scope(isolate); - Handle<JSFunction> function = - factory->NewFunction(factory->function_string(), factory->null_value()); + Handle<JSFunction> function = factory->NewFunctionWithPrototype( + factory->function_string(), factory->null_value()); // Start second old-space page so that keys land on evacuation candidate. Page* first_page = heap->old_pointer_space()->anchor()->next_page(); diff --git a/deps/v8/test/fuzz-natives/base.js b/deps/v8/test/fuzz-natives/base.js new file mode 100644 index 000000000..b9f70043f --- /dev/null +++ b/deps/v8/test/fuzz-natives/base.js @@ -0,0 +1,99 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// TODO(jkummerow): There are many ways to improve these tests, e.g.: +// - more variance in randomized inputs +// - better time complexity management +// - better code readability and documentation of intentions. 
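// A rough sketch of the wrapper that makeFunction(name, argc) below builds for
// each native, assuming a hypothetical runtime function %Foo of arity 2 (the
// real NAME/ARGC pairs are supplied by testcfg.py via %ListNatives()):
//
//   var wrapper = new Function("x0, x1", "return %Foo(x0, x1);");
//   wrapper.apply(void 0, makeArguments().slice(0, 2));  // any non-crashing outcome passes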
+ +var RUN_WITH_ALL_ARGUMENT_ENTRIES = false; +var kOnManyArgumentsRemove = 5; + +function makeArguments() { + var result = [ ]; + result.push(17); + result.push(-31); + result.push(new Array(100)); + result.push(new Array(100003)); + result.push(Number.MIN_VALUE); + result.push("whoops"); + result.push("x"); + result.push({"x": 1, "y": 2}); + var slowCaseObj = {"a": 3, "b": 4, "c": 5}; + delete slowCaseObj.c; + result.push(slowCaseObj); + result.push(function () { return 8; }); + return result; +} + +var kArgObjects = makeArguments().length; + +function makeFunction(name, argc) { + var args = []; + for (var i = 0; i < argc; i++) + args.push("x" + i); + var argsStr = args.join(", "); + return new Function(argsStr, + "return %" + name + "(" + argsStr + ");"); +} + +function testArgumentCount(name, argc) { + for (var i = 0; i < 10; i++) { + var func = null; + try { + func = makeFunction(name, i); + } catch (e) { + if (e != "SyntaxError: Illegal access") throw e; + } + if (func === null && i == argc) { + throw "unexpected exception"; + } + var args = [ ]; + for (var j = 0; j < i; j++) + args.push(0); + try { + func.apply(void 0, args); + } catch (e) { + // we don't care what happens as long as we don't crash + } + } +} + +function testArgumentTypes(name, argc) { + var type = 0; + var hasMore = true; + var func = makeFunction(name, argc); + while (hasMore) { + var argPool = makeArguments(); + // When we have 5 or more arguments we lower the amount of tests cases + // by randomly removing kOnManyArgumentsRemove entries + var numArguments = RUN_WITH_ALL_ARGUMENT_ENTRIES ? + kArgObjects : kArgObjects - kOnManyArgumentsRemove; + if (kArgObjects >= 5 && !RUN_WITH_ALL_ARGUMENT_ENTRIES) { + for (var i = 0; i < kOnManyArgumentsRemove; i++) { + var rand = Math.floor(Math.random() * (kArgObjects - i)); + argPool.splice(rand, 1); + } + } + var current = type; + hasMore = false; + var argList = [ ]; + for (var i = 0; i < argc; i++) { + var index = current % numArguments; + current = (current / numArguments) << 0; + if (index != (numArguments - 1)) + hasMore = true; + argList.push(argPool[index]); + } + try { + func.apply(void 0, argList); + } catch (e) { + // we don't care what happens as long as we don't crash + } + type++; + } +} + +testArgumentCount(NAME, ARGC); +testArgumentTypes(NAME, ARGC); diff --git a/deps/v8/test/fuzz-natives/fuzz-natives.status b/deps/v8/test/fuzz-natives/fuzz-natives.status new file mode 100644 index 000000000..fb3cae902 --- /dev/null +++ b/deps/v8/test/fuzz-natives/fuzz-natives.status @@ -0,0 +1,50 @@ +# Copyright 2014 the V8 project authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +[ +[ALWAYS, { + # These are designed to crash: + "Abort": [SKIP], + "AbortJS": [SKIP], + "SystemBreak": [SKIP], + "_DebugBreakInOptimizedCode": [SKIP], + + # varargs. + "Call": [SKIP], + "_CallFunction": [SKIP], + + # Implemented in the parser, not callable. + "IS_VAR": [SKIP], + + # Compile-time ASSERTs. + "_DateField": [SKIP], + "_GetFromCache": [SKIP], + + # Riddled with ASSERTs. + "CompileForOnStackReplacement": [SKIP], + + # Too slow for fuzzing. + "SetAllocationTimeout": [SKIP], + + # TODO(jkummerow): Fix these and un-blacklist them! + "CreateDateTimeFormat": [SKIP], + "CreateNumberFormat": [SKIP], + + # TODO(jkummerow): Figure out what to do about inlined functions. 
+ "_GeneratorNext": [SKIP], + "_GeneratorThrow": [SKIP], + "_GetCachedArrayIndex": [SKIP], + "_HasCachedArrayIndex": [SKIP], + "_IsStringWrapperSafeForDefaultValueOf": [SKIP], + "_OneByteSeqStringSetChar": [SKIP], + "_RegExpConstructResult": [SKIP], + "_TwoByteSeqStringSetChar": [SKIP], + + # These are slow. + "DebugEvaluate": [PASS, SLOW], + "DebugReferencedBy": [PASS, SLOW], + "SetAccessorProperty": [PASS, SLOW], + "SetScopeVariableValue": [PASS, SLOW], +}] # ALWAYS +] diff --git a/deps/v8/test/fuzz-natives/testcfg.py b/deps/v8/test/fuzz-natives/testcfg.py new file mode 100644 index 000000000..d8e3f056c --- /dev/null +++ b/deps/v8/test/fuzz-natives/testcfg.py @@ -0,0 +1,47 @@ +# Copyright 2014 the V8 project authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os + +from testrunner.local import commands +from testrunner.local import testsuite +from testrunner.local import utils +from testrunner.objects import testcase + +class FuzzNativesTestSuite(testsuite.TestSuite): + + def __init__(self, name, root): + super(FuzzNativesTestSuite, self).__init__(name, root) + + def ListTests(self, context): + shell = os.path.abspath(os.path.join(context.shell_dir, self.shell())) + if utils.IsWindows(): + shell += ".exe" + output = commands.Execute( + context.command_prefix + + [shell, "--allow-natives-syntax", "-e", + "try { var natives = %ListNatives();" + " for (var n in natives) { print(natives[n]); }" + "} catch(e) {}"] + + context.extra_flags) + if output.exit_code != 0: + print output.stdout + print output.stderr + assert false, "Failed to get natives list." + tests = [] + for line in output.stdout.strip().split(): + (name, argc) = line.split(",") + flags = ["--allow-natives-syntax", + "-e", "var NAME = '%s', ARGC = %s;" % (name, argc)] + test = testcase.TestCase(self, name, flags) + tests.append(test) + return tests + + def GetFlagsForTestCase(self, testcase, context): + name = testcase.path + basefile = os.path.join(self.root, "base.js") + return testcase.flags + [basefile] + context.mode_flags + +def GetSuite(name, root): + return FuzzNativesTestSuite(name, root) diff --git a/deps/v8/test/intl/break-iterator/protected-icu-internals.js b/deps/v8/test/intl/break-iterator/protected-icu-internals.js deleted file mode 100644 index ad1dc54fb..000000000 --- a/deps/v8/test/intl/break-iterator/protected-icu-internals.js +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Internal object we got from native code should not be writable, -// configurable or enumerable. One can still change its public properties, but -// we don't use them to do actual work. - -var iterator = new Intl.v8BreakIterator([]); - -// Direct write should fail. -iterator.iterator = {'zzz':'some random object'}; - -assertFalse(iterator.iterator.hasOwnProperty('zzz')); - -// Try redefining the property. -var didThrow = false; -try { - Object.defineProperty(iterator, 'iterator', {value: undefined}); -} catch(e) { - didThrow = true; -} -assertTrue(didThrow); - -// Try deleting the property. -assertFalse(delete iterator.iterator); diff --git a/deps/v8/test/intl/collator/protected-icu-internals.js b/deps/v8/test/intl/collator/protected-icu-internals.js deleted file mode 100644 index 7acd35e45..000000000 --- a/deps/v8/test/intl/collator/protected-icu-internals.js +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Internal object we got from native code should not be writable, -// configurable or enumerable. One can still change its public properties, but -// we don't use them to do actual work. - -var collator = new Intl.Collator([]); - -// Direct write should fail. -collator.collator = {'zzz':'some random object'}; - -assertFalse(collator.collator.hasOwnProperty('zzz')); - -// Try redefining the property. 
-var didThrow = false; -try { - Object.defineProperty(collator, 'collator', {value: undefined}); -} catch(e) { - didThrow = true; -} -assertTrue(didThrow); - -// Try deleting the property. -assertFalse(delete collator.collator); diff --git a/deps/v8/test/intl/date-format/protected-icu-internals.js b/deps/v8/test/intl/date-format/protected-icu-internals.js deleted file mode 100644 index 140f4b594..000000000 --- a/deps/v8/test/intl/date-format/protected-icu-internals.js +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Internal object we got from native code should not be writable, -// configurable or enumerable. One can still change its public properties, but -// we don't use them to do actual work. - -var format = new Intl.DateTimeFormat([]); - -// Direct write should fail. -format.formatter = {'zzz':'some random object'}; - -assertFalse(format.formatter.hasOwnProperty('zzz')); - -// Try redefining the property. -var didThrow = false; -try { - Object.defineProperty(format, 'formatter', {value: undefined}); -} catch(e) { - didThrow = true; -} -assertTrue(didThrow); - -// Try deleting the property. -assertFalse(delete format.formatter); diff --git a/deps/v8/test/intl/number-format/protected-icu-internals.js b/deps/v8/test/intl/number-format/protected-icu-internals.js deleted file mode 100644 index fc9b709c8..000000000 --- a/deps/v8/test/intl/number-format/protected-icu-internals.js +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Internal object we got from native code should not be writable, -// configurable or enumerable. One can still change its public properties, but -// we don't use them to do actual work. - -var format = new Intl.NumberFormat([]); - -// Direct write should fail. -format.formatter = {'zzz':'some random object'}; - -assertFalse(format.formatter.hasOwnProperty('zzz')); - -// Try redefining the property. -var didThrow = false; -try { - Object.defineProperty(format, 'formatter', {value: undefined}); -} catch(e) { - didThrow = true; -} -assertTrue(didThrow); - -// Try deleting the property. -assertFalse(delete format.formatter); diff --git a/deps/v8/test/mjsunit/array-push10.js b/deps/v8/test/mjsunit/array-push10.js new file mode 100644 index 000000000..223186e0e --- /dev/null +++ b/deps/v8/test/mjsunit/array-push10.js @@ -0,0 +1,15 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +function __f_17(__v_9) { + var __v_10 = 0; + var count = 10000; + while (count-- != 0) { + __v_9.push(0); + if (++__v_10 >= 2) return __v_9; + __v_10 = {}; + } +} + +__v_14 = __f_17([]); diff --git a/deps/v8/test/mjsunit/array-push11.js b/deps/v8/test/mjsunit/array-push11.js new file mode 100644 index 000000000..118161a98 --- /dev/null +++ b/deps/v8/test/mjsunit/array-push11.js @@ -0,0 +1,15 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +function __f_17(__v_9) { + var __v_10 = 0; + var count = 100000; + while (count-- != 0) { + var l = __v_9.push(0); + if (++__v_10 >= 2) return __v_9; + __v_10 = {}; + } +} + +__f_17([]); diff --git a/deps/v8/test/mjsunit/array-push2.js b/deps/v8/test/mjsunit/array-push2.js new file mode 100644 index 000000000..fe4be4eb3 --- /dev/null +++ b/deps/v8/test/mjsunit/array-push2.js @@ -0,0 +1,21 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
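// The test below installs an accessor for index 0 on Array.prototype and checks
// that both a direct indexed store and Array.prototype.push route through it:
// the setter accumulates into v, no own element is created, reads fall through
// to the prototype getter, and only push bumps array.length.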
+ +var array = []; +var v = 0; + +Object.defineProperty(Array.prototype, "0", { + get: function() { return "get " + v; }, + set: function(value) { v += value; } +}); + +array[0] = 10; +assertEquals(0, array.length); +assertEquals(10, v); +assertEquals("get 10", array[0]); + +array.push(100); +assertEquals(1, array.length); +assertEquals(110, v); +assertEquals("get 110", array[0]); diff --git a/deps/v8/test/mjsunit/array-push3.js b/deps/v8/test/mjsunit/array-push3.js new file mode 100644 index 000000000..99bd857a7 --- /dev/null +++ b/deps/v8/test/mjsunit/array-push3.js @@ -0,0 +1,29 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +var array = []; + +function push(array, value) { + array.push(value); +} + +push(array, 0); +push(array, 1); +push(array, 2); +%OptimizeFunctionOnNextCall(push); +push(array, 3); + +var v = 0; +Object.defineProperty(Array.prototype, "4", { + get: function() { return 100; }, + set: function(value) { v = value; } +}); + +push(array, 4); + +assertEquals(5, array.length); +assertEquals(100, array[4]); +assertEquals(4, v); diff --git a/deps/v8/test/mjsunit/array-push4.js b/deps/v8/test/mjsunit/array-push4.js new file mode 100644 index 000000000..678873fb3 --- /dev/null +++ b/deps/v8/test/mjsunit/array-push4.js @@ -0,0 +1,60 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +var v = 0; +var my_array_proto = {}; +my_array_proto.__proto__ = [].__proto__; +Object.defineProperty(my_array_proto, "0", { +get: function() { return "get " + v; }, +set: function(value) { v += value; } +}); + + +// Test that element accessors are called in standard push cases. +array = []; +array.__proto__ = my_array_proto; + +array[0] = 10; +assertEquals(0, array.length); +assertEquals(10, v); +assertEquals("get 10", array[0]); + +Array.prototype.push.call(array, 100); +assertEquals(1, array.length); +assertEquals(110, v); +assertEquals("get 110", array[0]); + +array = []; +array.__proto__ = my_array_proto; + +assertEquals(0, array.length); +array.push(110); +assertEquals(1, array.length); +assertEquals(220, v); +assertEquals("get 220", array[0]); + +// Test that elements setters/getters on prototype chain are property detected +// and don't lead to overzealous optimization. +v = 0; +function push_wrapper_1(array, value) { + array.push(value); +} +array = []; +array.__proto__ = my_array_proto; +push_wrapper_1(array, 100); +assertEquals(1, array.length); +assertEquals(100, v); +push_wrapper_1(array, 100); +assertEquals(2, array.length); +assertEquals(100, v); +assertEquals("get 100", array[0]); +%OptimizeFunctionOnNextCall(push_wrapper_1); +array = []; +array.__proto__ = my_array_proto; +push_wrapper_1(array, 100); +assertEquals(1, array.length); +assertEquals(200, v); +assertEquals("get 200", array[0]); diff --git a/deps/v8/test/mjsunit/array-push5.js b/deps/v8/test/mjsunit/array-push5.js new file mode 100644 index 000000000..83339a21e --- /dev/null +++ b/deps/v8/test/mjsunit/array-push5.js @@ -0,0 +1,42 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+
+// Flags: --allow-natives-syntax
+
+var v = 0;
+
+// Test that elements setters/getters on prototype chain set after the fact are
+// properly detected and don't lead to overzealous optimization.
+var my_array_proto = {};
+my_array_proto.__proto__ = [].__proto__;
+
+function push_wrapper_2(array, value) {
+  array.push(value);
+}
+array = [];
+array.__proto__ = my_array_proto;
+push_wrapper_2(array, 66);
+assertEquals(1, array.length);
+assertEquals(0, v);
+assertEquals(66, array[0]);
+push_wrapper_2(array, 77);
+assertEquals(2, array.length);
+assertEquals(0, v);
+assertEquals(77, array[1]);
+%OptimizeFunctionOnNextCall(push_wrapper_2);
+push_wrapper_2(array, 88);
+assertEquals(3, array.length);
+assertEquals(0, v);
+assertEquals(88, array[2]);
+assertOptimized(push_wrapper_2);
+// Defining accessor should deopt optimized push.
+Object.defineProperty(my_array_proto, "3", {
+get: function() { return "get " + v; },
+set: function(value) { v += value; }
+});
+assertUnoptimized(push_wrapper_2);
+push_wrapper_2(array, 99);
+assertEquals(4, array.length);
+assertEquals(99, v);
+assertEquals("get 99", array[3]);
diff --git a/deps/v8/test/mjsunit/array-push6.js b/deps/v8/test/mjsunit/array-push6.js
new file mode 100644
index 000000000..336b43d8d
--- /dev/null
+++ b/deps/v8/test/mjsunit/array-push6.js
@@ -0,0 +1,22 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax
+
+function push_wrapper(array, value) {
+  array.push(value);
+}
+
+// Test that optimization of Array.push() for non-Arrays works correctly.
+var object = { x : 8, length: 3 };
+object[18] = 5;
+object.__proto__ = Array.prototype;
+push_wrapper(object, 1);
+push_wrapper(object, 1);
+assertEquals(5, object.length);
+%OptimizeFunctionOnNextCall(push_wrapper);
+push_wrapper(object, 1);
+push_wrapper(object, 1);
+assertEquals(8, object.x);
+assertEquals(7, object.length);
diff --git a/deps/v8/test/mjsunit/array-push7.js b/deps/v8/test/mjsunit/array-push7.js
new file mode 100644
index 000000000..b45a739d7
--- /dev/null
+++ b/deps/v8/test/mjsunit/array-push7.js
@@ -0,0 +1,59 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax
+
+var v = 0;
+
+function push_wrapper(array, value) {
+  array.push(value);
+}
+function pop_wrapper(array) {
+  return array.pop();
+}
+
+// Test that Object.observe() notification events are properly sent from
+// Array.push() and Array.pop() both from optimized and un-optimized code.
+var array = [];
+
+function somethingChanged(changes) {
+  v++;
+}
+
+Object.observe(array, somethingChanged);
+push_wrapper(array, 1);
+%RunMicrotasks();
+assertEquals(1, array.length);
+assertEquals(1, v);
+push_wrapper(array, 1);
+%RunMicrotasks();
+assertEquals(2, array.length);
+assertEquals(2, v);
+%OptimizeFunctionOnNextCall(push_wrapper);
+push_wrapper(array, 1);
+%RunMicrotasks();
+assertEquals(3, array.length);
+assertEquals(3, v);
+push_wrapper(array, 1);
+%RunMicrotasks();
+assertEquals(4, array.length);
+assertEquals(4, v);
+
+pop_wrapper(array);
+%RunMicrotasks();
+assertEquals(3, array.length);
+assertEquals(5, v);
+pop_wrapper(array);
+%RunMicrotasks();
+assertEquals(2, array.length);
+assertEquals(6, v);
+%OptimizeFunctionOnNextCall(pop_wrapper);
+pop_wrapper(array);
+%RunMicrotasks();
+assertEquals(1, array.length);
+assertEquals(7, v);
+pop_wrapper(array);
+%RunMicrotasks();
+assertEquals(0, array.length);
+assertEquals(8, v);
diff --git a/deps/v8/test/mjsunit/array-push8.js b/deps/v8/test/mjsunit/array-push8.js
new file mode 100644
index 000000000..b1a87669d
--- /dev/null
+++ b/deps/v8/test/mjsunit/array-push8.js
@@ -0,0 +1,37 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax
+
+function push_wrapper(array, value) {
+  array.push(value);
+}
+function pop_wrapper(array) {
+  return array.pop();
+}
+
+// Test that frozen arrays throw an exception if you try to push to them, both in
+// optimized and non-optimized code.
+var array = [2, 2];
+Object.freeze(array);
+
+try { push_wrapper(array, 1); } catch (e) {}
+assertEquals(2, array.length);
+try { push_wrapper(array, 1); } catch (e) {}
+assertEquals(2, array.length);
+%OptimizeFunctionOnNextCall(push_wrapper);
+try { push_wrapper(array, 1); } catch (e) {}
+assertEquals(2, array.length);
+try { push_wrapper(array, 1); } catch (e) {}
+assertEquals(2, array.length);
+
+try { pop_wrapper(array); } catch (e) {}
+assertEquals(2, array.length);
+try { pop_wrapper(array); } catch (e) {}
+assertEquals(2, array.length);
+%OptimizeFunctionOnNextCall(pop_wrapper);
+try { pop_wrapper(array); } catch (e) {}
+assertEquals(2, array.length);
+try { pop_wrapper(array); } catch (e) {}
+assertEquals(2, array.length);
diff --git a/deps/v8/test/mjsunit/array-push9.js b/deps/v8/test/mjsunit/array-push9.js
new file mode 100644
index 000000000..d80cee89e
--- /dev/null
+++ b/deps/v8/test/mjsunit/array-push9.js
@@ -0,0 +1,29 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+ +// Flags: --allow-natives-syntax --deopt-every-n-times=5 --nodead-code-elimination + +var array = []; + +function push(array, value) { + array.push(value); +} + +push(array, 0); +push(array, 1); +push(array, 2); +%OptimizeFunctionOnNextCall(push); +push(array, 3); + +var v = 0; +Object.defineProperty(Array.prototype, "4", { + get: function() { return 100; }, + set: function(value) { v = value; } +}); + +push(array, 4); + +assertEquals(5, array.length); +assertEquals(100, array[4]); +assertEquals(4, v); diff --git a/deps/v8/test/mjsunit/binary-op-newspace.js b/deps/v8/test/mjsunit/binary-op-newspace.js index e3341c4a7..dac7d24db 100644 --- a/deps/v8/test/mjsunit/binary-op-newspace.js +++ b/deps/v8/test/mjsunit/binary-op-newspace.js @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --max-new-space-size=256 --noopt +// Flags: --max-new-space-size=2 --noopt // Check that a mod where the stub code hits a failure in heap number // allocation still works. diff --git a/deps/v8/test/mjsunit/bugs/harmony/debug-blockscopes.js b/deps/v8/test/mjsunit/bugs/harmony/debug-blockscopes.js index fda32eb3d..9ef8efbc0 100644 --- a/deps/v8/test/mjsunit/bugs/harmony/debug-blockscopes.js +++ b/deps/v8/test/mjsunit/bugs/harmony/debug-blockscopes.js @@ -144,18 +144,10 @@ function CheckScopeContent(content, number, exec_state) { if (!scope.scopeObject().property('arguments').isUndefined()) { scope_size--; } - // Also ignore synthetic variable from catch block. - if (!scope.scopeObject().property('.catch-var').isUndefined()) { - scope_size--; - } // Skip property with empty name. if (!scope.scopeObject().property('').isUndefined()) { scope_size--; } - // Also ignore synthetic variable from block scopes. - if (!scope.scopeObject().property('.block').isUndefined()) { - scope_size--; - } if (count != scope_size) { print('Names found in scope:'); diff --git a/deps/v8/test/mjsunit/compiler/math-floor-global.js b/deps/v8/test/mjsunit/compiler/math-floor-global.js index 3b9d12545..4a3bcb722 100644 --- a/deps/v8/test/mjsunit/compiler/math-floor-global.js +++ b/deps/v8/test/mjsunit/compiler/math-floor-global.js @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --max-new-space-size=128 --allow-natives-syntax +// Flags: --max-new-space-size=2 --allow-natives-syntax // Test inlining of Math.floor when assigned to a global. var flo = Math.floor; diff --git a/deps/v8/test/mjsunit/compiler/math-floor-local.js b/deps/v8/test/mjsunit/compiler/math-floor-local.js index fef3347e8..8424ac96d 100644 --- a/deps/v8/test/mjsunit/compiler/math-floor-local.js +++ b/deps/v8/test/mjsunit/compiler/math-floor-local.js @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --max-new-space-size=128 --allow-natives-syntax +// Flags: --max-new-space-size=2 --allow-natives-syntax // Test inlining of Math.floor when assigned to a local. 
var test_id = 0; diff --git a/deps/v8/test/mjsunit/debug-liveedit-newsource.js b/deps/v8/test/mjsunit/debug-liveedit-newsource.js index a60e69f90..49fde66de 100644 --- a/deps/v8/test/mjsunit/debug-liveedit-newsource.js +++ b/deps/v8/test/mjsunit/debug-liveedit-newsource.js @@ -32,6 +32,7 @@ Debug = debug.Debug eval("var something1 = 25; \n" + "var something2 = 2010; \n" + + "// Array(); \n" + "function ChooseAnimal() {\n" + " return 'Cat';\n" + "} \n" @@ -54,6 +55,13 @@ var new_source = script.source.replace("Cat", "Cap' + 'yb' + 'ara"); var new_source = new_source.replace("25", "26"); var new_source = new_source.replace("Help", "Hello"); var new_source = new_source.replace("17", "18"); +// The call to array causes a change in the number of type feedback slots for +// the script. +// +// TODO(mvstanton): For now, the inclusion of the Array() call at the top level +// of the script causes us to visit a corner case, but I'd like to validate +// correctness more explicitly. +var new_source = new_source.replace("// Array", "Array"); print("new source: " + new_source); var change_log = new Array(); diff --git a/deps/v8/test/mjsunit/debug-scopes.js b/deps/v8/test/mjsunit/debug-scopes.js index f5b5ec913..ce37d2402 100644 --- a/deps/v8/test/mjsunit/debug-scopes.js +++ b/deps/v8/test/mjsunit/debug-scopes.js @@ -166,10 +166,6 @@ function CheckScopeContent(content, number, exec_state) { if (!scope.scopeObject().property('arguments').isUndefined()) { scope_size--; } - // Also ignore synthetic variable from catch block. - if (!scope.scopeObject().property('.catch-var').isUndefined()) { - scope_size--; - } // Skip property with empty name. if (!scope.scopeObject().property('').isUndefined()) { scope_size--; diff --git a/deps/v8/test/mjsunit/debug-stepout-scope-part2.js b/deps/v8/test/mjsunit/debug-stepout-scope-part2.js index 121c7b74d..69cee994a 100644 --- a/deps/v8/test/mjsunit/debug-stepout-scope-part2.js +++ b/deps/v8/test/mjsunit/debug-stepout-scope-part2.js @@ -54,7 +54,7 @@ Debug.setListener(listener); var q = 42; var prefixes = [ "debugger; ", - "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ]; + "if (false) { try { throw 0; } catch(x) { this.x = x; } }; debugger; " ]; var bodies = [ "1", "1 ", "1;", diff --git a/deps/v8/test/mjsunit/debug-stepout-scope-part3.js b/deps/v8/test/mjsunit/debug-stepout-scope-part3.js index 16b085e54..319f87991 100644 --- a/deps/v8/test/mjsunit/debug-stepout-scope-part3.js +++ b/deps/v8/test/mjsunit/debug-stepout-scope-part3.js @@ -55,7 +55,7 @@ Debug.setListener(listener); var q = 42; var prefixes = [ "debugger; ", - "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ]; + "if (false) { try { throw 0; } catch(x) { this.x = x; } }; debugger; " ]; var with_bodies = [ "with ({}) {}", "with ({x:1}) x", "with ({x:1}) x = 1", diff --git a/deps/v8/test/mjsunit/debug-stepout-scope-part4.js b/deps/v8/test/mjsunit/debug-stepout-scope-part4.js index 48f43477d..eb9c82f8c 100644 --- a/deps/v8/test/mjsunit/debug-stepout-scope-part4.js +++ b/deps/v8/test/mjsunit/debug-stepout-scope-part4.js @@ -55,7 +55,7 @@ Debug.setListener(listener); var q = 42; var prefixes = [ "debugger; ", - "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ]; + "if (false) { try { throw 0; } catch(x) { this.x = x; } }; debugger; " ]; var bodies = [ "1", "1 ", "1;", diff --git a/deps/v8/test/mjsunit/debug-stepout-scope-part5.js b/deps/v8/test/mjsunit/debug-stepout-scope-part5.js index f060ec388..250bee4d8 100644 --- 
a/deps/v8/test/mjsunit/debug-stepout-scope-part5.js +++ b/deps/v8/test/mjsunit/debug-stepout-scope-part5.js @@ -54,7 +54,7 @@ Debug.setListener(listener); var q = 42; var prefixes = [ "debugger; ", - "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ]; + "if (false) { try { throw 0; } catch(x) { this.x = x; } }; debugger; " ]; var with_bodies = [ "with ({}) {}", "with ({x:1}) x", "with ({x:1}) x = 1", diff --git a/deps/v8/test/mjsunit/debug-stepout-scope-part6.js b/deps/v8/test/mjsunit/debug-stepout-scope-part6.js index f7c8df0bc..2d8357fea 100644 --- a/deps/v8/test/mjsunit/debug-stepout-scope-part6.js +++ b/deps/v8/test/mjsunit/debug-stepout-scope-part6.js @@ -54,7 +54,7 @@ Debug.setListener(listener); var q = 42; var prefixes = [ "debugger; ", - "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ]; + "if (false) { try { throw 0; } catch(x) { this.x = x; } }; debugger; " ]; var bodies = [ "1", "1 ", "1;", diff --git a/deps/v8/test/mjsunit/define-property-gc.js b/deps/v8/test/mjsunit/define-property-gc.js index b38164d0f..573a7edbd 100644 --- a/deps/v8/test/mjsunit/define-property-gc.js +++ b/deps/v8/test/mjsunit/define-property-gc.js @@ -26,7 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Tests the handling of GC issues in the defineProperty method. -// Flags: --max-new-space-size=256 +// Flags: --max-new-space-size=2 function Regular() { this[0] = 0; diff --git a/deps/v8/test/mjsunit/delay-syntax-error.js b/deps/v8/test/mjsunit/delay-syntax-error.js index 64cc1429b..20b2affac 100644 --- a/deps/v8/test/mjsunit/delay-syntax-error.js +++ b/deps/v8/test/mjsunit/delay-syntax-error.js @@ -25,18 +25,11 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// To be compatible with JSC syntax errors for illegal returns should be delayed -// to runtime. -// Invalid continue and break statements are caught at compile time. - -// Do not throw syntax errors for illegal return at compile time. -assertDoesNotThrow("if (false) return;"); - -// Throw syntax errors for illegal break and continue at compile time. +// Throw syntax errors for illegal return, break and continue at compile time. +assertThrows("if (false) return;"); assertThrows("if (false) break;"); assertThrows("if (false) continue;"); -// Throw syntax errors for illegal return, break and continue at runtime. assertThrows("return;"); assertThrows("break;"); assertThrows("continue;"); diff --git a/deps/v8/test/mjsunit/es6/debug-promises-caught-all.js b/deps/v8/test/mjsunit/es6/debug-promises-caught-all.js new file mode 100644 index 000000000..5189373e1 --- /dev/null +++ b/deps/v8/test/mjsunit/es6/debug-promises-caught-all.js @@ -0,0 +1,55 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-promises --expose-debug-as debug + +// Test debug events when we listen to all exceptions and +// there is a catch handler for the exception thrown in a Promise. +// We expect a normal Exception debug event to be triggered. 
+ +Debug = debug.Debug; + +var log = []; +var step = 0; + +var p = new Promise(function(resolve, reject) { + log.push("resolve"); + resolve(); +}); + +var q = p.chain( + function() { + log.push("throw"); + throw new Error("caught"); + }); + +q.catch( + function(e) { + assertEquals("caught", e.message); + }); + +function listener(event, exec_state, event_data, data) { + try { + // Ignore exceptions during startup in stress runs. + if (step >= 1) return; + assertEquals(["resolve", "end main", "throw"], log); + if (event == Debug.DebugEvent.Exception) { + assertEquals("caught", event_data.exception().message); + assertEquals(undefined, event_data.promise()); + assertFalse(event_data.uncaught()); + step++; + } + } catch (e) { + // Signal a failure with exit code 1. This is necessary since the + // debugger swallows exceptions and we expect the chained function + // and this listener to be executed after the main script is finished. + print("Unexpected exception: " + e + "\n" + e.stack); + quit(1); + } +} + +Debug.setBreakOnException(); +Debug.setListener(listener); + +log.push("end main"); diff --git a/deps/v8/test/mjsunit/es6/debug-promises-caught-late.js b/deps/v8/test/mjsunit/es6/debug-promises-caught-late.js new file mode 100644 index 000000000..66e073d4a --- /dev/null +++ b/deps/v8/test/mjsunit/es6/debug-promises-caught-late.js @@ -0,0 +1,38 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-promises --expose-debug-as debug + +// Test debug events when we only listen to uncaught exceptions, the Promise +// throws, and a catch handler is installed right before throwing. +// We expect no debug event to be triggered. + +Debug = debug.Debug; + +var p = new Promise(function(resolve, reject) { + resolve(); +}); + +var q = p.chain( + function() { + q.catch(function(e) { + assertEquals("caught", e.message); + }); + throw new Error("caught"); + }); + +function listener(event, exec_state, event_data, data) { + try { + assertTrue(event != Debug.DebugEvent.Exception); + } catch (e) { + // Signal a failure with exit code 1. This is necessary since the + // debugger swallows exceptions and we expect the chained function + // and this listener to be executed after the main script is finished. + print("Unexpected exception: " + e + "\n" + e.stack); + quit(1); + } +} + +Debug.setBreakOnUncaughtException(); +Debug.setListener(listener); diff --git a/deps/v8/test/mjsunit/es6/debug-promises-caught-uncaught.js b/deps/v8/test/mjsunit/es6/debug-promises-caught-uncaught.js new file mode 100644 index 000000000..9620d31bd --- /dev/null +++ b/deps/v8/test/mjsunit/es6/debug-promises-caught-uncaught.js @@ -0,0 +1,40 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-promises --expose-debug-as debug + +// Test debug events when we only listen to uncaught exceptions and +// there is a catch handler for the exception thrown in a Promise. +// We expect no debug event to be triggered. 
+
+Debug = debug.Debug;
+
+var p = new Promise(function(resolve, reject) {
+  resolve();
+});
+
+var q = p.chain(
+  function() {
+    throw new Error("caught");
+  });
+
+q.catch(
+  function(e) {
+    assertEquals("caught", e.message);
+  });
+
+function listener(event, exec_state, event_data, data) {
+  try {
+    assertTrue(event != Debug.DebugEvent.Exception);
+  } catch (e) {
+    // Signal a failure with exit code 1. This is necessary since the
+    // debugger swallows exceptions and we expect the chained function
+    // and this listener to be executed after the main script is finished.
+    print("Unexpected exception: " + e + "\n" + e.stack);
+    quit(1);
+  }
+}
+
+Debug.setBreakOnUncaughtException();
+Debug.setListener(listener);
diff --git a/deps/v8/test/mjsunit/es6/debug-promises-reentry.js b/deps/v8/test/mjsunit/es6/debug-promises-reentry.js
new file mode 100644
index 000000000..03c7fc2c8
--- /dev/null
+++ b/deps/v8/test/mjsunit/es6/debug-promises-reentry.js
@@ -0,0 +1,17 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --harmony-promises --expose-debug-as debug
+
+// Test reentry of special try catch for Promises.
+
+Debug = debug.Debug;
+
+Debug.setBreakOnUncaughtException();
+Debug.setListener(function(event, exec_state, event_data, data) { });
+
+var p = new Promise(function(resolve, reject) { resolve(); });
+var q = p.chain(function() {
+  new Promise(function(resolve, reject) { resolve(); });
+});
diff --git a/deps/v8/test/mjsunit/es6/debug-promises-throw-in-constructor.js b/deps/v8/test/mjsunit/es6/debug-promises-throw-in-constructor.js
new file mode 100644
index 000000000..d0267cefb
--- /dev/null
+++ b/deps/v8/test/mjsunit/es6/debug-promises-throw-in-constructor.js
@@ -0,0 +1,46 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --harmony-promises --expose-debug-as debug
+
+// Test debug events when we only listen to uncaught exceptions and
+// an exception is thrown in the Promise constructor.
+// We expect an Exception debug event with a promise to be triggered.
+
+Debug = debug.Debug;
+
+var step = 0;
+var exception = null;
+
+function listener(event, exec_state, event_data, data) {
+  try {
+    // Ignore exceptions during startup in stress runs.
+    if (step >= 1) return;
+    if (event == Debug.DebugEvent.Exception) {
+      assertEquals(0, step);
+      assertEquals("uncaught", event_data.exception().message);
+      assertTrue(event_data.promise() instanceof Promise);
+      assertTrue(event_data.uncaught());
+      // Assert that the debug event is triggered at the throw site.
+      assertTrue(exec_state.frame(0).sourceLineText().indexOf("// event") > 0);
+      step++;
+    }
+  } catch (e) {
+    // Signal a failure with exit code 1. This is necessary since the
+    // debugger swallows exceptions and we expect the chained function
+    // and this listener to be executed after the main script is finished.
+ print("Unexpected exception: " + e + "\n" + e.stack); + exception = e; + } +} + +Debug.setBreakOnUncaughtException(); +Debug.setListener(listener); + +var p = new Promise(function(resolve, reject) { + throw new Error("uncaught"); // event +}); + +assertEquals(1, step); +assertNull(exception); diff --git a/deps/v8/test/mjsunit/es6/debug-promises-throw-in-reject.js b/deps/v8/test/mjsunit/es6/debug-promises-throw-in-reject.js new file mode 100644 index 000000000..cdf759606 --- /dev/null +++ b/deps/v8/test/mjsunit/es6/debug-promises-throw-in-reject.js @@ -0,0 +1,61 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-promises --expose-debug-as debug + +// Test debug events when an exception is thrown inside a Promise, which is +// caught by a custom promise, which throws a new exception in its reject +// handler. We expect an Exception debug event with a promise to be triggered. + +Debug = debug.Debug; + +var log = []; +var step = 0; + +var p = new Promise(function(resolve, reject) { + log.push("resolve"); + resolve(); +}); + +function MyPromise(resolver) { + var reject = function() { + log.push("throw reject"); + throw new Error("reject"); // event + }; + var resolve = function() { }; + log.push("construct"); + resolver(resolve, reject); +}; + +MyPromise.prototype = p; +p.constructor = MyPromise; + +var q = p.chain( + function() { + log.push("throw caught"); + throw new Error("caught"); + }); + +function listener(event, exec_state, event_data, data) { + try { + if (event == Debug.DebugEvent.Exception) { + assertEquals(["resolve", "construct", "end main", + "throw caught", "throw reject"], log); + assertEquals("reject", event_data.exception().message); + assertEquals(q, event_data.promise()); + assertTrue(exec_state.frame(0).sourceLineText().indexOf('// event') > 0); + } + } catch (e) { + // Signal a failure with exit code 1. This is necessary since the + // debugger swallows exceptions and we expect the chained function + // and this listener to be executed after the main script is finished. + print("Unexpected exception: " + e + "\n" + e.stack); + quit(1); + } +} + +Debug.setBreakOnUncaughtException(); +Debug.setListener(listener); + +log.push("end main"); diff --git a/deps/v8/test/mjsunit/es6/debug-promises-uncaught-all.js b/deps/v8/test/mjsunit/es6/debug-promises-uncaught-all.js new file mode 100644 index 000000000..714e7da9c --- /dev/null +++ b/deps/v8/test/mjsunit/es6/debug-promises-uncaught-all.js @@ -0,0 +1,55 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-promises --expose-debug-as debug + +// Test debug events when we listen to all exceptions and +// there is a catch handler for the exception thrown in a Promise. +// We expect an Exception debug event with a promise to be triggered. + +Debug = debug.Debug; + +var log = []; +var step = 0; +var exception = undefined; + +var p = new Promise(function(resolve, reject) { + log.push("resolve"); + resolve(); +}); + +var q = p.chain( + function() { + log.push("throw"); + throw new Error("uncaught"); // event + }); + +function listener(event, exec_state, event_data, data) { + try { + // Ignore exceptions during startup in stress runs. 
+ if (step >= 1) return; + assertEquals(["resolve", "end main", "throw"], log); + if (event == Debug.DebugEvent.Exception) { + assertEquals(0, step); + assertEquals("uncaught", event_data.exception().message); + assertTrue(event_data.promise() instanceof Promise); + assertEquals(q, event_data.promise()); + assertTrue(event_data.uncaught()); + // Assert that the debug event is triggered at the throw site. + assertTrue(exec_state.frame(0).sourceLineText().indexOf("// event") > 0); + step++; + } + } catch (e) { + // Signal a failure with exit code 1. This is necessary since the + // debugger swallows exceptions and we expect the chained function + // and this listener to be executed after the main script is finished. + print("Unexpected exception: " + e + "\n" + e.stack); + quit(1); + } +} + +Debug.setBreakOnException(); +Debug.setListener(listener); + +log.push("end main"); diff --git a/deps/v8/test/mjsunit/es6/debug-promises-uncaught-uncaught.js b/deps/v8/test/mjsunit/es6/debug-promises-uncaught-uncaught.js new file mode 100644 index 000000000..fa97ac0d8 --- /dev/null +++ b/deps/v8/test/mjsunit/es6/debug-promises-uncaught-uncaught.js @@ -0,0 +1,54 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-promises --expose-debug-as debug + +// Test debug events when we only listen to uncaught exceptions and +// there is a catch handler for the exception thrown in a Promise. +// We expect an Exception debug event with a promise to be triggered. + +Debug = debug.Debug; + +var log = []; +var step = 0; + +var p = new Promise(function(resolve, reject) { + log.push("resolve"); + resolve(); +}); + +var q = p.chain( + function() { + log.push("throw"); + throw new Error("uncaught"); // event + }); + +function listener(event, exec_state, event_data, data) { + try { + // Ignore exceptions during startup in stress runs. + if (step >= 1) return; + assertEquals(["resolve", "end main", "throw"], log); + if (event == Debug.DebugEvent.Exception) { + assertEquals(0, step); + assertEquals("uncaught", event_data.exception().message); + assertTrue(event_data.promise() instanceof Promise); + assertEquals(q, event_data.promise()); + assertTrue(event_data.uncaught()); + // Assert that the debug event is triggered at the throw site. + assertTrue(exec_state.frame(0).sourceLineText().indexOf("// event") > 0); + step++; + } + } catch (e) { + // Signal a failure with exit code 1. This is necessary since the + // debugger swallows exceptions and we expect the chained function + // and this listener to be executed after the main script is finished. + print("Unexpected exception: " + e + "\n" + e.stack); + quit(1); + } +} + +Debug.setBreakOnUncaughtException(); +Debug.setListener(listener); + +log.push("end main"); diff --git a/deps/v8/test/mjsunit/es6/debug-promises-undefined-reject.js b/deps/v8/test/mjsunit/es6/debug-promises-undefined-reject.js new file mode 100644 index 000000000..5bad5bd37 --- /dev/null +++ b/deps/v8/test/mjsunit/es6/debug-promises-undefined-reject.js @@ -0,0 +1,57 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-promises --expose-debug-as debug + +// Test debug events when an exception is thrown inside a Promise, which is +// caught by a custom promise, which has no reject handler. 
+// We expect an Exception event with a promise to be triggered.
+
+Debug = debug.Debug;
+
+var log = [];
+var step = 0;
+
+var p = new Promise(function(resolve, reject) {
+  log.push("resolve");
+  resolve();
+});
+
+function MyPromise(resolver) {
+  var reject = undefined;
+  var resolve = function() { };
+  log.push("construct");
+  resolver(resolve, reject);
+};
+
+MyPromise.prototype = p;
+p.constructor = MyPromise;
+
+var q = p.chain(
+  function() {
+    log.push("throw caught");
+    throw new Error("caught");  // event
+  });
+
+function listener(event, exec_state, event_data, data) {
+  try {
+    if (event == Debug.DebugEvent.Exception) {
+      assertEquals(["resolve", "construct", "end main", "throw caught"], log);
+      assertEquals("undefined is not a function",
+                   event_data.exception().message);
+      assertEquals(q, event_data.promise());
+    }
+  } catch (e) {
+    // Signal a failure with exit code 1. This is necessary since the
+    // debugger swallows exceptions and we expect the chained function
+    // and this listener to be executed after the main script is finished.
+    print("Unexpected exception: " + e + "\n" + e.stack);
+    quit(1);
+  }
+}
+
+Debug.setBreakOnUncaughtException();
+Debug.setListener(listener);
+
+log.push("end main");
diff --git a/deps/v8/test/mjsunit/es6/mirror-promises.js b/deps/v8/test/mjsunit/es6/mirror-promises.js
new file mode 100644
index 000000000..5a21a6b9e
--- /dev/null
+++ b/deps/v8/test/mjsunit/es6/mirror-promises.js
@@ -0,0 +1,69 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --expose-debug-as debug --harmony-promises
+// Test the mirror object for promises.
+
+function MirrorRefCache(json_refs) {
+  var tmp = eval('(' + json_refs + ')');
+  this.refs_ = [];
+  for (var i = 0; i < tmp.length; i++) {
+    this.refs_[tmp[i].handle] = tmp[i];
+  }
+}
+
+MirrorRefCache.prototype.lookup = function(handle) {
+  return this.refs_[handle];
+}
+
+function testPromiseMirror(promise, status, value) {
+  // Create mirror and JSON representation.
+  var mirror = debug.MakeMirror(promise);
+  var serializer = debug.MakeMirrorSerializer();
+  var json = JSON.stringify(serializer.serializeValue(mirror));
+  var refs = new MirrorRefCache(
+      JSON.stringify(serializer.serializeReferencedObjects()));
+
+  // Check the mirror hierarchy.
+  assertTrue(mirror instanceof debug.Mirror);
+  assertTrue(mirror instanceof debug.ValueMirror);
+  assertTrue(mirror instanceof debug.ObjectMirror);
+  assertTrue(mirror instanceof debug.PromiseMirror);
+
+  // Check the mirror properties.
+  assertEquals(status, mirror.status());
+  assertTrue(mirror.isPromise());
+  assertEquals('promise', mirror.type());
+  assertFalse(mirror.isPrimitive());
+  assertEquals("Object", mirror.className());
+  assertEquals("#<Promise>", mirror.toText());
+  assertSame(promise, mirror.value());
+  assertEquals(value, mirror.promiseValue());
+
+  // Parse JSON representation and check.
+  var fromJSON = eval('(' + json + ')');
+  assertEquals('promise', fromJSON.type);
+  assertEquals('Object', fromJSON.className);
+  assertEquals('function', refs.lookup(fromJSON.constructorFunction.ref).type);
+  assertEquals('Promise', refs.lookup(fromJSON.constructorFunction.ref).name);
+  assertEquals(status, fromJSON.status);
+  assertEquals(value, fromJSON.promiseValue);
+}
+
+// Test a number of different promises.
+var resolved = new Promise(function(resolve, reject) { resolve() }); +var rejected = new Promise(function(resolve, reject) { reject() }); +var pending = new Promise(function(resolve, reject) {}); + +testPromiseMirror(resolved, "resolved", undefined); +testPromiseMirror(rejected, "rejected", undefined); +testPromiseMirror(pending, "pending", undefined); + +var resolvedv = new Promise(function(resolve, reject) { resolve('resolve') }); +var rejectedv = new Promise(function(resolve, reject) { reject('reject') }); +var thrownv = new Promise(function(resolve, reject) { throw 'throw' }); + +testPromiseMirror(resolvedv, "resolved", 'resolve'); +testPromiseMirror(rejectedv, "rejected", 'reject'); +testPromiseMirror(thrownv, "rejected", 'throw'); diff --git a/deps/v8/test/mjsunit/es6/promises.js b/deps/v8/test/mjsunit/es6/promises.js index 96a7bbbf3..6dfe9261a 100644 --- a/deps/v8/test/mjsunit/es6/promises.js +++ b/deps/v8/test/mjsunit/es6/promises.js @@ -399,6 +399,30 @@ function assertAsyncDone(iteration) { (function() { var deferred = Promise.defer() var p1 = deferred.promise + var p2 = p1.then(1, 2) + p2.then( + function(x) { assertAsync(x === 5, "then/resolve-non-function") }, + assertUnreachable + ) + deferred.resolve(5) + assertAsyncRan() +})(); + +(function() { + var deferred = Promise.defer() + var p1 = deferred.promise + var p2 = p1.then(1, 2) + p2.then( + assertUnreachable, + function(x) { assertAsync(x === 5, "then/reject-non-function") } + ) + deferred.reject(5) + assertAsyncRan() +})(); + +(function() { + var deferred = Promise.defer() + var p1 = deferred.promise var p2 = {then: function(onResolve, onReject) { onResolve(p1) }} var p3 = Promise.accept(p2) p3.chain( diff --git a/deps/v8/test/mjsunit/es7/object-observe.js b/deps/v8/test/mjsunit/es7/object-observe.js index f5e84a628..7bb579f0c 100644 --- a/deps/v8/test/mjsunit/es7/object-observe.js +++ b/deps/v8/test/mjsunit/es7/object-observe.js @@ -112,6 +112,8 @@ Object.defineProperty(changeRecordWithAccessor, 'name', { // Object.observe assertThrows(function() { Object.observe("non-object", observer.callback); }, TypeError); +assertThrows(function() { Object.observe(this, observer.callback); }, + TypeError); assertThrows(function() { Object.observe(obj, nonFunction); }, TypeError); assertThrows(function() { Object.observe(obj, frozenFunction); }, TypeError); assertEquals(obj, Object.observe(obj, observer.callback, [1])); @@ -126,6 +128,8 @@ assertEquals(obj, Object.observe(obj, observer.callback)); // Object.unobserve assertThrows(function() { Object.unobserve(4, observer.callback); }, TypeError); +assertThrows(function() { Object.unobserve(this, observer.callback); }, + TypeError); assertThrows(function() { Object.unobserve(obj, nonFunction); }, TypeError); assertEquals(obj, Object.unobserve(obj, observer.callback)); @@ -134,6 +138,7 @@ assertEquals(obj, Object.unobserve(obj, observer.callback)); var notifier = Object.getNotifier(obj); assertSame(notifier, Object.getNotifier(obj)); assertEquals(null, Object.getNotifier(Object.freeze({}))); +assertThrows(function() { Object.getNotifier(this) }, TypeError); assertFalse(notifier.hasOwnProperty('notify')); assertEquals([], Object.keys(notifier)); var notifyDesc = Object.getOwnPropertyDescriptor(notifier.__proto__, 'notify'); @@ -1073,6 +1078,8 @@ function TestObserveNonConfigurable(obj, prop, desc) { Object.unobserve(obj, observer.callback); } +// TODO(rafaelw) Enable when ES6 Proxies are implemented +/* function createProxy(create, x) { var handler = { getPropertyDescriptor: function(k) 
{ @@ -1112,11 +1119,11 @@ function createProxy(create, x) { Object.observe(handler.target, handler.callback); return handler.proxy = create(handler, x); } +*/ var objects = [ {}, [], - this, // global object function(){}, (function(){ return arguments })(), (function(){ "use strict"; return arguments })(), @@ -1124,9 +1131,10 @@ var objects = [ new Date(), Object, Function, Date, RegExp, new Set, new Map, new WeakMap, - new ArrayBuffer(10), new Int32Array(5), - createProxy(Proxy.create, null), - createProxy(Proxy.createFunction, function(){}), + new ArrayBuffer(10), new Int32Array(5) +// TODO(rafaelw) Enable when ES6 Proxies are implemented. +// createProxy(Proxy.create, null), +// createProxy(Proxy.createFunction, function(){}), ]; var properties = ["a", "1", 1, "length", "setPrototype", "name", "caller"]; diff --git a/deps/v8/test/mjsunit/field-type-tracking.js b/deps/v8/test/mjsunit/field-type-tracking.js new file mode 100644 index 000000000..b4901f34c --- /dev/null +++ b/deps/v8/test/mjsunit/field-type-tracking.js @@ -0,0 +1,167 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --nostress-opt --track-field-types + +(function() { + var o = { text: "Hello World!" }; + function A() { + this.a = o; + } + function readA(x) { + return x.a; + } + var a = new A(); + assertUnoptimized(readA); + readA(a); readA(a); readA(a); + %OptimizeFunctionOnNextCall(readA); + assertEquals(readA(a), o); + assertOptimized(readA); + + var b = new A(); + b.b = o; + assertEquals(readA(b), o); + assertUnoptimized(readA); + %OptimizeFunctionOnNextCall(readA); + assertEquals(readA(a), o); + assertOptimized(readA); + assertEquals(readA(a), o); + assertEquals(readA(b), o); + assertOptimized(readA); + + function readAFromB(x) { + return x.a; + } + assertUnoptimized(readAFromB); + readAFromB(b); readAFromB(b); readAFromB(b); + %OptimizeFunctionOnNextCall(readAFromB); + assertEquals(readAFromB(b), o); + assertOptimized(readAFromB); + + var c = new A(); + c.c = o; + assertOptimized(readA); + assertOptimized(readAFromB); + c.a = [1]; + assertUnoptimized(readA); + assertUnoptimized(readAFromB); + assertEquals(readA(a), o); + assertEquals(readA(b), o); + assertEquals(readA(c), [1]); + assertEquals(readAFromB(b), o); + + %OptimizeFunctionOnNextCall(readA); + assertEquals(readA(a), o); + %OptimizeFunctionOnNextCall(readAFromB); + assertEquals(readAFromB(b), o); + assertOptimized(readA); + a.a = [1]; + assertEquals(readA(a), [1]); + assertEquals(readA(b), o); + assertEquals(readA(c), [1]); + assertOptimized(readA); + b.a = [1]; + assertEquals(readA(a), [1]); + assertEquals(readA(b), [1]); + assertEquals(readA(c), [1]); + assertOptimized(readA); + assertOptimized(readAFromB); +})(); + +(function() { + function A() { this.x = 0; } + A.prototype = {y: 20}; + function B(o) { return o.a.y; } + function C() { this.a = new A(); } + + B(new C()); + B(new C()); + %OptimizeFunctionOnNextCall(B); + var c = new C(); + assertEquals(20, B(c)); + assertOptimized(B); + c.a.y = 10; + assertEquals(10, B(c)); + assertUnoptimized(B); + + var c = new C(); + %OptimizeFunctionOnNextCall(B); + assertEquals(20, B(c)); + assertOptimized(B); + c.a.y = 30; + assertEquals(30, B(c)); + assertOptimized(B); +})(); + +(function() { + var x = new Object(); + x.a = 1 + "Long string that results in a cons string"; + x = JSON.parse('{"a":"Short"}'); +})(); + +(function() { + var x = {y: {z: 1}}; + x.y.z = 
1.1; +})(); + +(function() { + function Foo(x) { this.x = x; } + var f1 = new Foo({x: 1}); + var f2 = new Foo({x: 2}); + var f3 = new Foo({x: 3}); + function readX(f) { return f.x.x; } + assertEquals(readX(f1), 1); + assertEquals(readX(f2), 2); + assertUnoptimized(readX); + %OptimizeFunctionOnNextCall(readX); + assertEquals(readX(f3), 3); + assertOptimized(readX); + function writeX(f, x) { f.x = x; } + writeX(f1, {x: 11}); + writeX(f2, {x: 22}); + assertUnoptimized(writeX); + assertEquals(readX(f1), 11); + assertEquals(readX(f2), 22); + assertOptimized(readX); + %OptimizeFunctionOnNextCall(writeX); + writeX(f3, {x: 33}); + assertEquals(readX(f3), 33); + assertOptimized(readX); + assertOptimized(writeX); + function addY(f, y) { f.y = y; } + writeX(f1, {a: "a"}); + assertUnoptimized(readX); + assertUnoptimized(writeX); +})(); + +(function() { + function Narf(x) { this.x = x; } + var f1 = new Narf(1); + var f2 = new Narf(2); + var f3 = new Narf(3); + function baz(f, y) { f.y = y; } + baz(f1, {y: 9}); + baz(f2, {y: 9}); + %OptimizeFunctionOnNextCall(baz); + baz(f3, {a: -1}); + assertUnoptimized(baz); +})(); + +(function() { + function Foo(x) { this.x = x; this.a = x; } + function Bar(x) { this.x = x; this.b = x; } + function readA(o) { return o.x.a; } + var f = new Foo({a:1}); + var b = new Bar({a:2}); + assertEquals(readA(f), 1); + assertEquals(readA(b), 2); + assertEquals(readA(f), 1); + assertEquals(readA(b), 2); + %OptimizeFunctionOnNextCall(readA); + assertEquals(readA(f), 1); + assertEquals(readA(b), 2); + assertOptimized(readA); + f.a.y = 0; + assertUnoptimized(readA); +})(); diff --git a/deps/v8/test/mjsunit/function-caller.js b/deps/v8/test/mjsunit/function-caller.js index bc01750f9..a2c54bbfd 100644 --- a/deps/v8/test/mjsunit/function-caller.js +++ b/deps/v8/test/mjsunit/function-caller.js @@ -46,10 +46,10 @@ f(null); // Check called from eval. eval('f(null)'); -// Check called from builtin functions. Only show the initially called -// (publicly exposed) builtin function, not it's internal helper functions. -[Array.prototype.sort, Array.prototype.sort].sort(f); +// Check called from strict builtin functions. +[null, null].sort(f); +// Check called from sloppy builtin functions. "abel".replace(/b/g, function h() { assertEquals(String.prototype.replace, h.caller); }); diff --git a/deps/v8/test/mjsunit/function-length-accessor.js b/deps/v8/test/mjsunit/function-length-accessor.js new file mode 100644 index 000000000..357ac3fdf --- /dev/null +++ b/deps/v8/test/mjsunit/function-length-accessor.js @@ -0,0 +1,35 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-scoping + +function foo(a, b, c, d) { + "use strict" + const x = 10; + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. 
+ // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + // long comment to trigger lazy compilation. + x = 20; // This will trigger compile error with harmony scoping. +} + +assertThrows("foo.length()"); diff --git a/deps/v8/test/mjsunit/fuzz-natives-part1.js b/deps/v8/test/mjsunit/fuzz-natives-part1.js deleted file mode 100644 index 0bd0dc1ab..000000000 --- a/deps/v8/test/mjsunit/fuzz-natives-part1.js +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -// Flags: --allow-natives-syntax - -var RUN_WITH_ALL_ARGUMENT_ENTRIES = false; -var kOnManyArgumentsRemove = 5; - -function makeArguments() { - var result = [ ]; - result.push(17); - result.push(-31); - result.push(new Array(100)); - result.push(new Array(100003)); - result.push(Number.MIN_VALUE); - result.push("whoops"); - result.push("x"); - result.push({"x": 1, "y": 2}); - var slowCaseObj = {"a": 3, "b": 4, "c": 5}; - delete slowCaseObj.c; - result.push(slowCaseObj); - result.push(function () { return 8; }); - return result; -} - -var kArgObjects = makeArguments().length; - -function makeFunction(name, argc) { - var args = []; - for (var i = 0; i < argc; i++) - args.push("x" + i); - var argsStr = args.join(", "); - return new Function(args.join(", "), "return %" + name + "(" + argsStr + ");"); -} - -function testArgumentCount(name, argc) { - for (var i = 0; i < 10; i++) { - var func = null; - try { - func = makeFunction(name, i); - } catch (e) { - if (e != "SyntaxError: Illegal access") throw e; - } - if (func === null && i == argc) { - throw "unexpected exception"; - } - var args = [ ]; - for (var j = 0; j < i; j++) - args.push(0); - try { - func.apply(void 0, args); - } catch (e) { - // we don't care what happens as long as we don't crash - } - } -} - -function testArgumentTypes(name, argc) { - var type = 0; - var hasMore = true; - var func = makeFunction(name, argc); - while (hasMore) { - var argPool = makeArguments(); - // When we have 5 or more arguments we lower the amount of tests cases - // by randomly removing kOnManyArgumentsRemove entries - var numArguments = RUN_WITH_ALL_ARGUMENT_ENTRIES ? - kArgObjects : kArgObjects-kOnManyArgumentsRemove; - if (argc >= 5 && !RUN_WITH_ALL_ARGUMENT_ENTRIES) { - for (var i = 0; i < kOnManyArgumentsRemove; i++) { - var rand = Math.floor(Math.random() * (kArgObjects - i)); - argPool.splice(rand,1); - } - } - var current = type; - var hasMore = false; - var argList = [ ]; - for (var i = 0; i < argc; i++) { - var index = current % numArguments; - current = (current / numArguments) << 0; - if (index != (numArguments - 1)) - hasMore = true; - argList.push(argPool[index]); - } - try { - func.apply(void 0, argList); - } catch (e) { - // we don't care what happens as long as we don't crash - } - type++; - } -} - -var knownProblems = { - "Abort": true, - - // Avoid calling the concat operation, because weird lengths - // may lead to out-of-memory. Ditto for StringBuilderJoin. - "StringBuilderConcat": true, - "StringBuilderJoin": true, - - // These functions use pseudo-stack-pointers and are not robust - // to unexpected integer values. - "DebugEvaluate": true, - - // These functions do nontrivial error checking in recursive calls, - // which means that we have to propagate errors back. - "SetFunctionBreakPoint": true, - "SetScriptBreakPoint": true, - "PrepareStep": true, - - // Too slow. - "DebugReferencedBy": true, - - // Calling disable/enable access checks may interfere with the - // the rest of the tests. - "DisableAccessChecks": true, - "EnableAccessChecks": true, - - // IS_VAR is special. - "IS_VAR": true, - - // Vararg with minimum number > 0. - "Call": true, - "SetAllocationTimeout": true, - - // Requires integer arguments to be non-negative. - "Apply": true, - - // That can only be invoked on Array.prototype. - "FinishArrayPrototypeSetup": true, - - "_SwapElements": true, - - // Performance critical functions which cannot afford type checks. 
- "_IsNativeOrStrictMode": true, - "_CallFunction": true, - - // Tries to allocate based on argument, and (correctly) throws - // out-of-memory if the request is too large. In practice, the - // size will be the number of captures of a RegExp. - "RegExpConstructResult": true, - "_RegExpConstructResult": true, - - // This functions perform some checks compile time (they require one of their - // arguments to be a compile time smi). - "_DateField": true, - "_GetFromCache": true, - - // This function expects its first argument to be a non-smi. - "_IsStringWrapperSafeForDefaultValueOf" : true, - - // Only applicable to strings. - "_HasCachedArrayIndex": true, - "_GetCachedArrayIndex": true, - "_OneByteSeqStringSetChar": true, - "_TwoByteSeqStringSetChar": true, - - // Only applicable to TypedArrays. - "_TypedArrayInitialize": true, - - // Only applicable to generators. - "_GeneratorNext": true, - "_GeneratorThrow": true, - - // Only applicable to DataViews. - "_DataViewInitialize": true, -}; - -var currentlyUncallable = { - // We need to find a way to test this without breaking the system. - "SystemBreak": true, - // Inserts an int3/stop instruction when run with --always-opt. - "_DebugBreakInOptimizedCode": true -}; - -function testNatives() { - var allNatives = %ListNatives(); - var start = 0; - var stop = (allNatives.length >> 2); - for (var i = start; i < stop; i++) { - var nativeInfo = allNatives[i]; - var name = nativeInfo[0]; - if (name in knownProblems || name in currentlyUncallable) - continue; - print(name); - var argc = nativeInfo[1]; - testArgumentCount(name, argc); - testArgumentTypes(name, argc); - } -} - -testNatives(); diff --git a/deps/v8/test/mjsunit/fuzz-natives-part2.js b/deps/v8/test/mjsunit/fuzz-natives-part2.js deleted file mode 100644 index 103e13291..000000000 --- a/deps/v8/test/mjsunit/fuzz-natives-part2.js +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -// Flags: --allow-natives-syntax - -var RUN_WITH_ALL_ARGUMENT_ENTRIES = false; -var kOnManyArgumentsRemove = 5; - -function makeArguments() { - var result = [ ]; - result.push(17); - result.push(-31); - result.push(new Array(100)); - result.push(new Array(100003)); - result.push(Number.MIN_VALUE); - result.push("whoops"); - result.push("x"); - result.push({"x": 1, "y": 2}); - var slowCaseObj = {"a": 3, "b": 4, "c": 5}; - delete slowCaseObj.c; - result.push(slowCaseObj); - result.push(function () { return 8; }); - return result; -} - -var kArgObjects = makeArguments().length; - -function makeFunction(name, argc) { - var args = []; - for (var i = 0; i < argc; i++) - args.push("x" + i); - var argsStr = args.join(", "); - return new Function(args.join(", "), "return %" + name + "(" + argsStr + ");"); -} - -function testArgumentCount(name, argc) { - for (var i = 0; i < 10; i++) { - var func = null; - try { - func = makeFunction(name, i); - } catch (e) { - if (e != "SyntaxError: Illegal access") throw e; - } - if (func === null && i == argc) { - throw "unexpected exception"; - } - var args = [ ]; - for (var j = 0; j < i; j++) - args.push(0); - try { - func.apply(void 0, args); - } catch (e) { - // we don't care what happens as long as we don't crash - } - } -} - -function testArgumentTypes(name, argc) { - var type = 0; - var hasMore = true; - var func = makeFunction(name, argc); - while (hasMore) { - var argPool = makeArguments(); - // When we have 5 or more arguments we lower the amount of tests cases - // by randomly removing kOnManyArgumentsRemove entries - var numArguments = RUN_WITH_ALL_ARGUMENT_ENTRIES ? - kArgObjects : kArgObjects-kOnManyArgumentsRemove; - if (argc >= 5 && !RUN_WITH_ALL_ARGUMENT_ENTRIES) { - for (var i = 0; i < kOnManyArgumentsRemove; i++) { - var rand = Math.floor(Math.random() * (kArgObjects - i)); - argPool.splice(rand,1); - } - } - var current = type; - var hasMore = false; - var argList = [ ]; - for (var i = 0; i < argc; i++) { - var index = current % numArguments; - current = (current / numArguments) << 0; - if (index != (numArguments - 1)) - hasMore = true; - argList.push(argPool[index]); - } - try { - func.apply(void 0, argList); - } catch (e) { - // we don't care what happens as long as we don't crash - } - type++; - } -} - -var knownProblems = { - "Abort": true, - - // Avoid calling the concat operation, because weird lengths - // may lead to out-of-memory. Ditto for StringBuilderJoin. - "StringBuilderConcat": true, - "StringBuilderJoin": true, - - // These functions use pseudo-stack-pointers and are not robust - // to unexpected integer values. - "DebugEvaluate": true, - - // These functions do nontrivial error checking in recursive calls, - // which means that we have to propagate errors back. - "SetFunctionBreakPoint": true, - "SetScriptBreakPoint": true, - "PrepareStep": true, - - // Too slow. - "DebugReferencedBy": true, - - // Calling disable/enable access checks may interfere with the - // the rest of the tests. - "DisableAccessChecks": true, - "EnableAccessChecks": true, - - // IS_VAR is special. - "IS_VAR": true, - - // Vararg with minimum number > 0. - "Call": true, - "SetAllocationTimeout": true, - - // Requires integer arguments to be non-negative. - "Apply": true, - - // That can only be invoked on Array.prototype. - "FinishArrayPrototypeSetup": true, - - "_SwapElements": true, - - // Performance critical functions which cannot afford type checks. 
- "_IsNativeOrStrictMode": true, - "_CallFunction": true, - - // Tries to allocate based on argument, and (correctly) throws - // out-of-memory if the request is too large. In practice, the - // size will be the number of captures of a RegExp. - "RegExpConstructResult": true, - "_RegExpConstructResult": true, - - // This functions perform some checks compile time (they require one of their - // arguments to be a compile time smi). - "_DateField": true, - "_GetFromCache": true, - - // This function expects its first argument to be a non-smi. - "_IsStringWrapperSafeForDefaultValueOf" : true, - - // Only applicable to strings. - "_HasCachedArrayIndex": true, - "_GetCachedArrayIndex": true, - "_OneByteSeqStringSetChar": true, - "_TwoByteSeqStringSetChar": true, - - // Only applicable to TypedArrays. - "_TypedArrayInitialize": true, - - // Only applicable to generators. - "_GeneratorNext": true, - "_GeneratorThrow": true, - - // Only applicable to DataViews. - "_DataViewInitialize": true, -}; - -var currentlyUncallable = { - // We need to find a way to test this without breaking the system. - "SystemBreak": true, - // Inserts an int3/stop instruction when run with --always-opt. - "_DebugBreakInOptimizedCode": true -}; - -function testNatives() { - var allNatives = %ListNatives(); - var start = allNatives.length >> 2; - var stop = (allNatives.length >> 2)*2; - for (var i = start; i < stop; i++) { - var nativeInfo = allNatives[i]; - var name = nativeInfo[0]; - if (name in knownProblems || name in currentlyUncallable) - continue; - print(name); - var argc = nativeInfo[1]; - testArgumentCount(name, argc); - testArgumentTypes(name, argc); - } -} - -testNatives(); diff --git a/deps/v8/test/mjsunit/fuzz-natives-part3.js b/deps/v8/test/mjsunit/fuzz-natives-part3.js deleted file mode 100644 index 7a8125a73..000000000 --- a/deps/v8/test/mjsunit/fuzz-natives-part3.js +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -// Flags: --allow-natives-syntax - -var RUN_WITH_ALL_ARGUMENT_ENTRIES = false; -var kOnManyArgumentsRemove = 5; - -function makeArguments() { - var result = [ ]; - result.push(17); - result.push(-31); - result.push(new Array(100)); - result.push(new Array(100003)); - result.push(Number.MIN_VALUE); - result.push("whoops"); - result.push("x"); - result.push({"x": 1, "y": 2}); - var slowCaseObj = {"a": 3, "b": 4, "c": 5}; - delete slowCaseObj.c; - result.push(slowCaseObj); - result.push(function () { return 8; }); - return result; -} - -var kArgObjects = makeArguments().length; - -function makeFunction(name, argc) { - var args = []; - for (var i = 0; i < argc; i++) - args.push("x" + i); - var argsStr = args.join(", "); - return new Function(args.join(", "), "return %" + name + "(" + argsStr + ");"); -} - -function testArgumentCount(name, argc) { - for (var i = 0; i < 10; i++) { - var func = null; - try { - func = makeFunction(name, i); - } catch (e) { - if (e != "SyntaxError: Illegal access") throw e; - } - if (func === null && i == argc) { - throw "unexpected exception"; - } - var args = [ ]; - for (var j = 0; j < i; j++) - args.push(0); - try { - func.apply(void 0, args); - } catch (e) { - // we don't care what happens as long as we don't crash - } - } -} - -function testArgumentTypes(name, argc) { - var type = 0; - var hasMore = true; - var func = makeFunction(name, argc); - while (hasMore) { - var argPool = makeArguments(); - // When we have 5 or more arguments we lower the amount of tests cases - // by randomly removing kOnManyArgumentsRemove entries - var numArguments = RUN_WITH_ALL_ARGUMENT_ENTRIES ? - kArgObjects : kArgObjects-kOnManyArgumentsRemove; - if (argc >= 5 && !RUN_WITH_ALL_ARGUMENT_ENTRIES) { - for (var i = 0; i < kOnManyArgumentsRemove; i++) { - var rand = Math.floor(Math.random() * (kArgObjects - i)); - argPool.splice(rand,1); - } - } - var current = type; - var hasMore = false; - var argList = [ ]; - for (var i = 0; i < argc; i++) { - var index = current % numArguments; - current = (current / numArguments) << 0; - if (index != (numArguments - 1)) - hasMore = true; - argList.push(argPool[index]); - } - try { - func.apply(void 0, argList); - } catch (e) { - // we don't care what happens as long as we don't crash - } - type++; - } -} - -var knownProblems = { - "Abort": true, - - // Avoid calling the concat operation, because weird lengths - // may lead to out-of-memory. Ditto for StringBuilderJoin. - "StringBuilderConcat": true, - "StringBuilderJoin": true, - - // These functions use pseudo-stack-pointers and are not robust - // to unexpected integer values. - "DebugEvaluate": true, - - // These functions do nontrivial error checking in recursive calls, - // which means that we have to propagate errors back. - "SetFunctionBreakPoint": true, - "SetScriptBreakPoint": true, - "PrepareStep": true, - - // Too slow. - "DebugReferencedBy": true, - - // Calling disable/enable access checks may interfere with the - // the rest of the tests. - "DisableAccessChecks": true, - "EnableAccessChecks": true, - - // IS_VAR is special. - "IS_VAR": true, - - // Vararg with minimum number > 0. - "Call": true, - "SetAllocationTimeout": true, - - // Requires integer arguments to be non-negative. - "Apply": true, - - // That can only be invoked on Array.prototype. - "FinishArrayPrototypeSetup": true, - - "_SwapElements": true, - - // Performance critical functions which cannot afford type checks. 
- "_IsNativeOrStrictMode": true, - "_CallFunction": true, - - // Tries to allocate based on argument, and (correctly) throws - // out-of-memory if the request is too large. In practice, the - // size will be the number of captures of a RegExp. - "RegExpConstructResult": true, - "_RegExpConstructResult": true, - - // This functions perform some checks compile time (they require one of their - // arguments to be a compile time smi). - "_DateField": true, - "_GetFromCache": true, - - // This function expects its first argument to be a non-smi. - "_IsStringWrapperSafeForDefaultValueOf" : true, - - // Only applicable to strings. - "_HasCachedArrayIndex": true, - "_GetCachedArrayIndex": true, - "_OneByteSeqStringSetChar": true, - "_TwoByteSeqStringSetChar": true, - - // Only applicable to TypedArrays. - "_TypedArrayInitialize": true, - - // Only applicable to generators. - "_GeneratorNext": true, - "_GeneratorThrow": true, - - // Only applicable to DataViews. - "_DataViewInitialize": true, -}; - -var currentlyUncallable = { - // We need to find a way to test this without breaking the system. - "SystemBreak": true, - // Inserts an int3/stop instruction when run with --always-opt. - "_DebugBreakInOptimizedCode": true -}; - -function testNatives() { - var allNatives = %ListNatives(); - var start = (allNatives.length >> 2)*2; - var stop = (allNatives.length >> 2)*3; - for (var i = start; i < stop; i++) { - var nativeInfo = allNatives[i]; - var name = nativeInfo[0]; - if (name in knownProblems || name in currentlyUncallable) - continue; - print(name); - var argc = nativeInfo[1]; - testArgumentCount(name, argc); - testArgumentTypes(name, argc); - } -} - -testNatives(); diff --git a/deps/v8/test/mjsunit/fuzz-natives-part4.js b/deps/v8/test/mjsunit/fuzz-natives-part4.js deleted file mode 100644 index 952374925..000000000 --- a/deps/v8/test/mjsunit/fuzz-natives-part4.js +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -// Flags: --allow-natives-syntax - -var RUN_WITH_ALL_ARGUMENT_ENTRIES = false; -var kOnManyArgumentsRemove = 5; - -function makeArguments() { - var result = [ ]; - result.push(17); - result.push(-31); - result.push(new Array(100)); - result.push(new Array(100003)); - result.push(Number.MIN_VALUE); - result.push("whoops"); - result.push("x"); - result.push({"x": 1, "y": 2}); - var slowCaseObj = {"a": 3, "b": 4, "c": 5}; - delete slowCaseObj.c; - result.push(slowCaseObj); - result.push(function () { return 8; }); - return result; -} - -var kArgObjects = makeArguments().length; - -function makeFunction(name, argc) { - var args = []; - for (var i = 0; i < argc; i++) - args.push("x" + i); - var argsStr = args.join(", "); - return new Function(args.join(", "), "return %" + name + "(" + argsStr + ");"); -} - -function testArgumentCount(name, argc) { - for (var i = 0; i < 10; i++) { - var func = null; - try { - func = makeFunction(name, i); - } catch (e) { - if (e != "SyntaxError: Illegal access") throw e; - } - if (func === null && i == argc) { - throw "unexpected exception"; - } - var args = [ ]; - for (var j = 0; j < i; j++) - args.push(0); - try { - func.apply(void 0, args); - } catch (e) { - // we don't care what happens as long as we don't crash - } - } -} - -function testArgumentTypes(name, argc) { - var type = 0; - var hasMore = true; - var func = makeFunction(name, argc); - while (hasMore) { - var argPool = makeArguments(); - // When we have 5 or more arguments we lower the amount of tests cases - // by randomly removing kOnManyArgumentsRemove entries - var numArguments = RUN_WITH_ALL_ARGUMENT_ENTRIES ? - kArgObjects : kArgObjects-kOnManyArgumentsRemove; - if (argc >= 5 && !RUN_WITH_ALL_ARGUMENT_ENTRIES) { - for (var i = 0; i < kOnManyArgumentsRemove; i++) { - var rand = Math.floor(Math.random() * (kArgObjects - i)); - argPool.splice(rand,1); - } - } - var current = type; - var hasMore = false; - var argList = [ ]; - for (var i = 0; i < argc; i++) { - var index = current % numArguments; - current = (current / numArguments) << 0; - if (index != (numArguments - 1)) - hasMore = true; - argList.push(argPool[index]); - } - try { - func.apply(void 0, argList); - } catch (e) { - // we don't care what happens as long as we don't crash - } - type++; - } -} - -var knownProblems = { - "Abort": true, - - // Avoid calling the concat operation, because weird lengths - // may lead to out-of-memory. Ditto for StringBuilderJoin. - "StringBuilderConcat": true, - "StringBuilderJoin": true, - - // These functions use pseudo-stack-pointers and are not robust - // to unexpected integer values. - "DebugEvaluate": true, - - // These functions do nontrivial error checking in recursive calls, - // which means that we have to propagate errors back. - "SetFunctionBreakPoint": true, - "SetScriptBreakPoint": true, - "PrepareStep": true, - - // Too slow. - "DebugReferencedBy": true, - - // Calling disable/enable access checks may interfere with the - // the rest of the tests. - "DisableAccessChecks": true, - "EnableAccessChecks": true, - - // IS_VAR is special. - "IS_VAR": true, - - // Vararg with minimum number > 0. - "Call": true, - "SetAllocationTimeout": true, - - // Requires integer arguments to be non-negative. - "Apply": true, - - // That can only be invoked on Array.prototype. - "FinishArrayPrototypeSetup": true, - - "_SwapElements": true, - - // Performance critical functions which cannot afford type checks. 
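// Sketch (not part of the patch; the helper name is illustrative): each deleted
// fuzz-natives-partN.js covers one quarter of %ListNatives(). Part 2 runs over
// [len >> 2, 2 * (len >> 2)), part 3 over the next quarter, and part 4 (whose
// testNatives() follows below) runs from 3 * (len >> 2) to the end so the rounding
// remainder is still covered; a matching part 1 presumably handles the first quarter.
function fuzzNativesRange(total, part) {
  var quarter = total >> 2;                           // same integer quarter as the tests
  var start = quarter * (part - 1);
  var stop = (part === 4) ? total : quarter * part;   // the last part absorbs the remainder
  return [start, stop];
}
// fuzzNativesRange(103, 2) -> [25, 50]; fuzzNativesRange(103, 4) -> [75, 103]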
- "_IsNativeOrStrictMode": true, - "_CallFunction": true, - - // Tries to allocate based on argument, and (correctly) throws - // out-of-memory if the request is too large. In practice, the - // size will be the number of captures of a RegExp. - "RegExpConstructResult": true, - "_RegExpConstructResult": true, - - // This functions perform some checks compile time (they require one of their - // arguments to be a compile time smi). - "_DateField": true, - "_GetFromCache": true, - - // This function expects its first argument to be a non-smi. - "_IsStringWrapperSafeForDefaultValueOf" : true, - - // Only applicable to strings. - "_HasCachedArrayIndex": true, - "_GetCachedArrayIndex": true, - "_OneByteSeqStringSetChar": true, - "_TwoByteSeqStringSetChar": true, - - // Only applicable to TypedArrays. - "_TypedArrayInitialize": true, - - // Only applicable to generators. - "_GeneratorNext": true, - "_GeneratorThrow": true, - - // Only applicable to DataViews. - "_DataViewInitialize": true, -}; - -var currentlyUncallable = { - // We need to find a way to test this without breaking the system. - "SystemBreak": true, - // Inserts an int3/stop instruction when run with --always-opt. - "_DebugBreakInOptimizedCode": true -}; - -function testNatives() { - var allNatives = %ListNatives(); - var start = (allNatives.length >> 2)*3; - var stop = allNatives.length; - for (var i = start; i < stop; i++) { - var nativeInfo = allNatives[i]; - var name = nativeInfo[0]; - if (name in knownProblems || name in currentlyUncallable) - continue; - print(name); - var argc = nativeInfo[1]; - testArgumentCount(name, argc); - testArgumentTypes(name, argc); - } -} - -testNatives(); diff --git a/deps/v8/test/mjsunit/harmony/array-fill.js b/deps/v8/test/mjsunit/harmony/array-fill.js new file mode 100644 index 000000000..571233f6f --- /dev/null +++ b/deps/v8/test/mjsunit/harmony/array-fill.js @@ -0,0 +1,32 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-arrays + +assertEquals(1, Array.prototype.find.length); + +assertArrayEquals([].fill(8), []); +assertArrayEquals([0, 0, 0, 0, 0].fill(), [undefined, undefined, undefined, undefined, undefined]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8), [8, 8, 8, 8, 8]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, 1), [0, 8, 8, 8, 8]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, 10), [0, 0, 0, 0, 0]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, -5), [8, 8, 8, 8, 8]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, 1, 4), [0, 8, 8, 8, 0]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, 1, -1), [0, 8, 8, 8, 0]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, 1, 42), [0, 8, 8, 8, 8]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, -3, 42), [0, 0, 8, 8, 8]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, -3, 4), [0, 0, 8, 8, 0]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, -2, -1), [0, 0, 0, 8, 0]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, -1, -3), [0, 0, 0, 0, 0]); +assertArrayEquals([0, 0, 0, 0, 0].fill(8, undefined, 4), [8, 8, 8, 8, 0]); +assertArrayEquals([ , , , , 0].fill(8, 1, 3), [, 8, 8, , 0]); + +// If the range if empty, the array is not actually modified and +// should not throw, even when applied to a frozen object. 
+assertArrayEquals(Object.freeze([1, 2, 3]).fill(0, 0, 0), [1, 2, 3]); + +// Test exceptions +assertThrows('Object.freeze([0]).fill()', TypeError); +assertThrows('Array.prototype.fill.call(null)', TypeError); +assertThrows('Array.prototype.fill.call(undefined)', TypeError); diff --git a/deps/v8/test/mjsunit/harmony/collections.js b/deps/v8/test/mjsunit/harmony/collections.js index 804a320f3..7bf7bf706 100644 --- a/deps/v8/test/mjsunit/harmony/collections.js +++ b/deps/v8/test/mjsunit/harmony/collections.js @@ -508,3 +508,347 @@ for (var i = 9; i >= 0; i--) { assertEquals('minus', m.get(0)); assertEquals('minus', m.get(-0)); })(); + + +(function TestSetForEachInvalidTypes() { + assertThrows(function() { + Set.prototype.set.forEach.call({}); + }, TypeError); + + var set = new Set(); + assertThrows(function() { + set.forEach({}); + }, TypeError); +})(); + + +(function TestSetForEach() { + var set = new Set(); + set.add('a'); + set.add('b'); + set.add('c'); + + var buffer = ''; + var receiver = {}; + set.forEach(function(v, k, s) { + assertSame(v, k); + assertSame(set, s); + assertSame(this, receiver); + buffer += v; + if (v === 'a') { + set.delete('b'); + set.add('d'); + set.add('e'); + set.add('f'); + } else if (v === 'c') { + set.add('b'); + set.delete('e'); + } + }, receiver); + + assertEquals('acdfb', buffer); +})(); + + +(function TestSetForEachAddAtEnd() { + var set = new Set(); + set.add('a'); + set.add('b'); + + var buffer = ''; + set.forEach(function(v) { + buffer += v; + if (v === 'b') { + set.add('c'); + } + }); + + assertEquals('abc', buffer); +})(); + + +(function TestSetForEachDeleteNext() { + var set = new Set(); + set.add('a'); + set.add('b'); + set.add('c'); + + var buffer = ''; + set.forEach(function(v) { + buffer += v; + if (v === 'b') { + set.delete('c'); + } + }); + + assertEquals('ab', buffer); +})(); + + +(function TestSetForEachDeleteVisitedAndAddAgain() { + var set = new Set(); + set.add('a'); + set.add('b'); + set.add('c'); + + var buffer = ''; + set.forEach(function(v) { + buffer += v; + if (v === 'b') { + set.delete('a'); + } else if (v === 'c') { + set.add('a'); + } + }); + + assertEquals('abca', buffer); +})(); + + +(function TestSetForEachClear() { + var set = new Set(); + set.add('a'); + set.add('b'); + set.add('c'); + + var buffer = ''; + set.forEach(function(v) { + buffer += v; + if (v === 'a') { + set.clear(); + set.add('d'); + set.add('e'); + } + }); + + assertEquals('ade', buffer); +})(); + + +(function TestSetForEachNested() { + var set = new Set(); + set.add('a'); + set.add('b'); + set.add('c'); + + var buffer = ''; + set.forEach(function(v) { + buffer += v; + set.forEach(function(v) { + buffer += v; + if (v === 'a') { + set.delete('b'); + } + }); + }); + + assertEquals('aaccac', buffer); +})(); + + +(function TestSetForEachEarlyExit() { + var set = new Set(); + set.add('a'); + set.add('b'); + set.add('c'); + + var buffer = ''; + var ex = {}; + try { + set.forEach(function(v) { + buffer += v; + throw ex; + }); + } catch (e) { + assertEquals(ex, e); + } + assertEquals('a', buffer); +})(); + + +(function TestSetForEachGC() { + var set = new Set(); + for (var i = 0; i < 100; i++) { + set.add(i); + } + + var accumulated = 0; + set.forEach(function(v) { + accumulated += v; + if (v % 10 === 0) { + gc(); + } + }); + assertEquals(4950, accumulated); +})(); + +(function TestMapForEachInvalidTypes() { + assertThrows(function() { + Map.prototype.map.forEach.call({}); + }, TypeError); + + var map = new Map(); + assertThrows(function() { + map.forEach({}); + }, 
TypeError); +})(); + + +(function TestMapForEach() { + var map = new Map(); + map.set(0, 'a'); + map.set(1, 'b'); + map.set(2, 'c'); + + var buffer = []; + var receiver = {}; + map.forEach(function(v, k, m) { + assertEquals(map, m); + assertEquals(this, receiver); + buffer.push(k, v); + if (k === 0) { + map.delete(1); + map.set(3, 'd'); + map.set(4, 'e'); + map.set(5, 'f'); + } else if (k === 2) { + map.set(1, 'B'); + map.delete(4); + } + }, receiver); + + assertArrayEquals([0, 'a', 2, 'c', 3, 'd', 5, 'f', 1, 'B'], buffer); +})(); + + +(function TestMapForEachAddAtEnd() { + var map = new Map(); + map.set(0, 'a'); + map.set(1, 'b'); + + var buffer = []; + map.forEach(function(v, k) { + buffer.push(k, v); + if (k === 1) { + map.set(2, 'c'); + } + }); + + assertArrayEquals([0, 'a', 1, 'b', 2, 'c'], buffer); +})(); + + +(function TestMapForEachDeleteNext() { + var map = new Map(); + map.set(0, 'a'); + map.set(1, 'b'); + map.set(2, 'c'); + + var buffer = []; + map.forEach(function(v, k) { + buffer.push(k, v); + if (k === 1) { + map.delete(2); + } + }); + + assertArrayEquals([0, 'a', 1, 'b'], buffer); +})(); + + +(function TestSetForEachDeleteVisitedAndAddAgain() { + var map = new Map(); + map.set(0, 'a'); + map.set(1, 'b'); + map.set(2, 'c'); + + var buffer = []; + map.forEach(function(v, k) { + buffer.push(k, v); + if (k === 1) { + map.delete(0); + } else if (k === 2) { + map.set(0, 'a'); + } + }); + + assertArrayEquals([0, 'a', 1, 'b', 2, 'c', 0, 'a'], buffer); +})(); + + +(function TestMapForEachClear() { + var map = new Map(); + map.set(0, 'a'); + map.set(1, 'b'); + map.set(2, 'c'); + + var buffer = []; + map.forEach(function(v, k) { + buffer.push(k, v); + if (k === 0) { + map.clear(); + map.set(3, 'd'); + map.set(4, 'e'); + } + }); + + assertArrayEquals([0, 'a', 3, 'd', 4, 'e'], buffer); +})(); + + +(function TestMapForEachNested() { + var map = new Map(); + map.set(0, 'a'); + map.set(1, 'b'); + map.set(2, 'c'); + + var buffer = []; + map.forEach(function(v, k) { + buffer.push(k, v); + map.forEach(function(v, k) { + buffer.push(k, v); + if (k === 0) { + map.delete(1); + } + }); + }); + + assertArrayEquals([0, 'a', 0, 'a', 2, 'c', 2, 'c', 0, 'a', 2, 'c'], buffer); +})(); + + +(function TestMapForEachEarlyExit() { + var map = new Map(); + map.set(0, 'a'); + map.set(1, 'b'); + map.set(2, 'c'); + + var buffer = []; + var ex = {}; + try { + map.forEach(function(v, k) { + buffer.push(k, v); + throw ex; + }); + } catch (e) { + assertEquals(ex, e); + } + assertArrayEquals([0, 'a'], buffer); +})(); + + +(function TestMapForEachGC() { + var map = new Map(); + for (var i = 0; i < 100; i++) { + map.set(i, i); + } + + var accumulated = 0; + map.forEach(function(v) { + accumulated += v; + if (v % 10 === 0) { + gc(); + } + }); + assertEquals(4950, accumulated); +})(); diff --git a/deps/v8/test/mjsunit/harmony/debug-blockscopes.js b/deps/v8/test/mjsunit/harmony/debug-blockscopes.js index ca2ab9e5a..f56a306b6 100644 --- a/deps/v8/test/mjsunit/harmony/debug-blockscopes.js +++ b/deps/v8/test/mjsunit/harmony/debug-blockscopes.js @@ -147,18 +147,10 @@ function CheckScopeContent(content, number, exec_state) { if (!scope.scopeObject().property('arguments').isUndefined()) { scope_size--; } - // Also ignore synthetic variable from catch block. - if (!scope.scopeObject().property('.catch-var').isUndefined()) { - scope_size--; - } // Skip property with empty name. if (!scope.scopeObject().property('').isUndefined()) { scope_size--; } - // Also ignore synthetic variable from block scopes. 
- if (!scope.scopeObject().property('.block').isUndefined()) { - scope_size--; - } if (count != scope_size) { print('Names found in scope:'); @@ -375,8 +367,9 @@ listener_delegate = function(exec_state) { debug.ScopeType.Local, debug.ScopeType.Global], exec_state); CheckScopeContent({x:'y'}, 0, exec_state); - // The function scope contains a temporary iteration variable. - CheckScopeContent({'.for.x':'y'}, 1, exec_state); + // The function scope contains a temporary iteration variable, but it is + // hidden to the debugger. + CheckScopeContent({}, 1, exec_state); }; for_loop_1(); EndTest(); @@ -400,8 +393,9 @@ listener_delegate = function(exec_state) { debug.ScopeType.Global], exec_state); CheckScopeContent({x:3}, 0, exec_state); CheckScopeContent({x:'y'}, 1, exec_state); - // The function scope contains a temporary iteration variable. - CheckScopeContent({'.for.x':'y'}, 2, exec_state); + // The function scope contains a temporary iteration variable, hidden to the + // debugger. + CheckScopeContent({}, 2, exec_state); }; for_loop_2(); EndTest(); diff --git a/deps/v8/test/mjsunit/harmony/generators-debug-scopes.js b/deps/v8/test/mjsunit/harmony/generators-debug-scopes.js new file mode 100644 index 000000000..ad0ea53de --- /dev/null +++ b/deps/v8/test/mjsunit/harmony/generators-debug-scopes.js @@ -0,0 +1,326 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --expose-debug-as debug --harmony-generators + +var Debug = debug.Debug; + +function RunTest(name, formals_and_body, args, handler, continuation) { + var handler_called = false; + var exception = null; + + function listener(event, exec_state, event_data, data) { + try { + if (event == Debug.DebugEvent.Break) { + handler_called = true; + handler(exec_state); + } + } catch (e) { + exception = e; + } + } + + function run(thunk) { + handler_called = false; + exception = null; + + var res = thunk(); + if (continuation) + continuation(res); + + assertTrue(handler_called, "listener not called for " + name); + assertNull(exception, name + " / " + exception); + } + + var fun = Function.apply(null, formals_and_body); + var gen = (function*(){}).constructor.apply(null, formals_and_body); + + Debug.setListener(listener); + + run(function () { return fun.apply(null, args) }); + run(function () { return gen.apply(null, args).next().value }); + + // TODO(wingo): Uncomment after bug 2838 is fixed. + // Debug.setListener(null); +} + +// Check that two scope are the same. +function assertScopeMirrorEquals(scope1, scope2) { + assertEquals(scope1.scopeType(), scope2.scopeType()); + assertEquals(scope1.frameIndex(), scope2.frameIndex()); + assertEquals(scope1.scopeIndex(), scope2.scopeIndex()); + assertPropertiesEqual(scope1.scopeObject().value(), scope2.scopeObject().value()); +} + +function CheckFastAllScopes(scopes, exec_state) { + var fast_all_scopes = exec_state.frame().allScopes(true); + var length = fast_all_scopes.length; + assertTrue(scopes.length >= length); + for (var i = 0; i < scopes.length && i < length; i++) { + var scope = fast_all_scopes[length - i - 1]; + assertTrue(scope.isScope()); + assertEquals(scopes[scopes.length - i - 1], scope.scopeType()); + } +} + +// Check that the scope chain contains the expected types of scopes. 
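// Sketch of the constructor trick used by RunTest() above (assumes --harmony-generators
// as in the test; names are illustrative): there is no global GeneratorFunction binding,
// so the generator-function constructor is reached through an existing generator literal
// and both a plain function and a generator are built from the same formals-and-body.
var GeneratorFunction = (function*(){}).constructor;
var gen = GeneratorFunction('a', 'yield a + 1;');
// gen(41).next() -> { value: 42, done: false }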
+function CheckScopeChain(scopes, exec_state) { + var all_scopes = exec_state.frame().allScopes(); + assertEquals(scopes.length, exec_state.frame().scopeCount()); + assertEquals(scopes.length, all_scopes.length, "FrameMirror.allScopes length"); + for (var i = 0; i < scopes.length; i++) { + var scope = exec_state.frame().scope(i); + assertTrue(scope.isScope()); + assertEquals(scopes[i], scope.scopeType()); + assertScopeMirrorEquals(all_scopes[i], scope); + + // Check the global object when hitting the global scope. + if (scopes[i] == debug.ScopeType.Global) { + // Objects don't have same class (one is "global", other is "Object", + // so just check the properties directly. + assertPropertiesEqual(this, scope.scopeObject().value()); + } + } + CheckFastAllScopes(scopes, exec_state); + + // Get the debug command processor. + var dcp = exec_state.debugCommandProcessor("unspecified_running_state"); + + // Send a scopes request and check the result. + var json; + var request_json = '{"seq":0,"type":"request","command":"scopes"}'; + var response_json = dcp.processDebugJSONRequest(request_json); + var response = JSON.parse(response_json); + assertEquals(scopes.length, response.body.scopes.length); + for (var i = 0; i < scopes.length; i++) { + assertEquals(i, response.body.scopes[i].index); + assertEquals(scopes[i], response.body.scopes[i].type); + if (scopes[i] == debug.ScopeType.Local || + scopes[i] == debug.ScopeType.Closure) { + assertTrue(response.body.scopes[i].object.ref < 0); + } else { + assertTrue(response.body.scopes[i].object.ref >= 0); + } + var found = false; + for (var j = 0; j < response.refs.length && !found; j++) { + found = response.refs[j].handle == response.body.scopes[i].object.ref; + } + assertTrue(found, "Scope object " + response.body.scopes[i].object.ref + " not found"); + } +} + +// Check that the content of the scope is as expected. For functions just check +// that there is a function. +function CheckScopeContent(content, number, exec_state) { + var scope = exec_state.frame().scope(number); + var count = 0; + for (var p in content) { + var property_mirror = scope.scopeObject().property(p); + assertFalse(property_mirror.isUndefined(), 'property ' + p + ' not found in scope'); + if (typeof(content[p]) === 'function') { + assertTrue(property_mirror.value().isFunction()); + } else { + assertEquals(content[p], property_mirror.value().value(), 'property ' + p + ' has unexpected value'); + } + count++; + } + + // 'arguments' and might be exposed in the local and closure scope. Just + // ignore this. + var scope_size = scope.scopeObject().properties().length; + if (!scope.scopeObject().property('arguments').isUndefined()) { + scope_size--; + } + // Skip property with empty name. + if (!scope.scopeObject().property('').isUndefined()) { + scope_size--; + } + + if (count != scope_size) { + print('Names found in scope:'); + var names = scope.scopeObject().propertyNames(); + for (var i = 0; i < names.length; i++) { + print(names[i]); + } + } + assertEquals(count, scope_size); + + // Get the debug command processor. + var dcp = exec_state.debugCommandProcessor("unspecified_running_state"); + + // Send a scope request for information on a single scope and check the + // result. 
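// Sketch of the debug JSON request assembled by hand above and below (helper name is
// illustrative; the sign convention, negative refs for transient Local/Closure scope
// objects, comes straight from the assertions rather than from documented guarantees):
function scopeRequest(index) {
  return JSON.stringify({seq: 0, type: 'request', command: 'scope',
                         arguments: {number: index}});
}
// scopeRequest(1) === '{"seq":0,"type":"request","command":"scope","arguments":{"number":1}}'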
+ var request_json = '{"seq":0,"type":"request","command":"scope","arguments":{"number":'; + request_json += scope.scopeIndex(); + request_json += '}}'; + var response_json = dcp.processDebugJSONRequest(request_json); + var response = JSON.parse(response_json); + assertEquals(scope.scopeType(), response.body.type); + assertEquals(number, response.body.index); + if (scope.scopeType() == debug.ScopeType.Local || + scope.scopeType() == debug.ScopeType.Closure) { + assertTrue(response.body.object.ref < 0); + } else { + assertTrue(response.body.object.ref >= 0); + } + var found = false; + for (var i = 0; i < response.refs.length && !found; i++) { + found = response.refs[i].handle == response.body.object.ref; + } + assertTrue(found, "Scope object " + response.body.object.ref + " not found"); +} + + +// Simple empty local scope. +RunTest("Local 1", + ['debugger;'], + [], + function (exec_state) { + CheckScopeChain([debug.ScopeType.Local, + debug.ScopeType.Global], exec_state); + CheckScopeContent({}, 0, exec_state); + }); + +// Local scope with a parameter. +RunTest("Local 2", + ['a', 'debugger;'], + [1], + function (exec_state) { + CheckScopeChain([debug.ScopeType.Local, + debug.ScopeType.Global], exec_state); + CheckScopeContent({a:1}, 0, exec_state); + }); + +// Local scope with a parameter and a local variable. +RunTest("Local 3", + ['a', 'var x = 3; debugger;'], + [1], + function (exec_state) { + CheckScopeChain([debug.ScopeType.Local, + debug.ScopeType.Global], exec_state); + CheckScopeContent({a:1,x:3}, 0, exec_state); + }); + +// Local scope with parameters and local variables. +RunTest("Local 4", + ['a', 'b', 'var x = 3; var y = 4; debugger;'], + [1, 2], + function (exec_state) { + CheckScopeChain([debug.ScopeType.Local, + debug.ScopeType.Global], exec_state); + CheckScopeContent({a:1,b:2,x:3,y:4}, 0, exec_state); + }); + +// Empty local scope with use of eval. +RunTest("Local 5", + ['eval(""); debugger;'], + [], + function (exec_state) { + CheckScopeChain([debug.ScopeType.Local, + debug.ScopeType.Global], exec_state); + CheckScopeContent({}, 0, exec_state); + }); + +// Local introducing local variable using eval. +RunTest("Local 6", + ['eval("var i = 5"); debugger;'], + [], + function (exec_state) { + CheckScopeChain([debug.ScopeType.Local, + debug.ScopeType.Global], exec_state); + CheckScopeContent({i:5}, 0, exec_state); + }); + +// Local scope with parameters, local variables and local variable introduced +// using eval. +RunTest("Local 7", + ['a', 'b', + "var x = 3; var y = 4;\n" + + "eval('var i = 5'); eval ('var j = 6');\n" + + "debugger;"], + [1, 2], + function (exec_state) { + CheckScopeChain([debug.ScopeType.Local, + debug.ScopeType.Global], exec_state); + CheckScopeContent({a:1,b:2,x:3,y:4,i:5,j:6}, 0, exec_state); + }); + +// Nested empty with blocks. +RunTest("With", + ["with ({}) { with ({}) { debugger; } }"], + [], + function (exec_state) { + CheckScopeChain([debug.ScopeType.With, + debug.ScopeType.With, + debug.ScopeType.Local, + debug.ScopeType.Global], exec_state); + CheckScopeContent({}, 0, exec_state); + CheckScopeContent({}, 1, exec_state); + }); + +// Simple closure formed by returning an inner function referering the outer +// functions arguments. 
+RunTest("Closure 1", + ['a', 'return function() { debugger; return a; }'], + [1], + function (exec_state) { + CheckScopeChain([debug.ScopeType.Local, + debug.ScopeType.Closure, + debug.ScopeType.Global], exec_state); + CheckScopeContent({a:1}, 1, exec_state); + }, + function (result) { result() }); + +RunTest("The full monty", + ['a', 'b', + "var x = 3;\n" + + "var y = 4;\n" + + "eval('var i = 5');\n" + + "eval('var j = 6');\n" + + "function f(a, b) {\n" + + " var x = 9;\n" + + " var y = 10;\n" + + " eval('var i = 11');\n" + + " eval('var j = 12');\n" + + " with ({j:13}){\n" + + " return function() {\n" + + " var x = 14;\n" + + " with ({a:15}) {\n" + + " with ({b:16}) {\n" + + " debugger;\n" + + " some_global = a;\n" + + " return f;\n" + + " }\n" + + " }\n" + + " };\n" + + " }\n" + + "}\n" + + "return f(a, b);"], + [1, 2], + function (exec_state) { + CheckScopeChain([debug.ScopeType.With, + debug.ScopeType.With, + debug.ScopeType.Local, + debug.ScopeType.With, + debug.ScopeType.Closure, + debug.ScopeType.Closure, + debug.ScopeType.Global], exec_state); + CheckScopeContent({b:16}, 0, exec_state); + CheckScopeContent({a:15}, 1, exec_state); + CheckScopeContent({x:14}, 2, exec_state); + CheckScopeContent({j:13}, 3, exec_state); + CheckScopeContent({a:1,b:2,x:9,y:10,i:11,j:12}, 4, exec_state); + CheckScopeContent({a:1,b:2,x:3,y:4,i:5,j:6,f:function(){}}, 5, exec_state); + }, + function (result) { result() }); + +RunTest("Catch block 1", + ["try { throw 'Exception'; } catch (e) { debugger; }"], + [], + function (exec_state) { + CheckScopeChain([debug.ScopeType.Catch, + debug.ScopeType.Local, + debug.ScopeType.Global], exec_state); + CheckScopeContent({e:'Exception'}, 0, exec_state); + }); diff --git a/deps/v8/test/mjsunit/harmony/generators-relocation.js b/deps/v8/test/mjsunit/harmony/generators-relocation.js new file mode 100644 index 000000000..4074235c8 --- /dev/null +++ b/deps/v8/test/mjsunit/harmony/generators-relocation.js @@ -0,0 +1,61 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --expose-debug-as debug --harmony-generators + +var Debug = debug.Debug; + +function assertIteratorResult(value, done, result) { + assertEquals({value: value, done: done}, result); +} + +function RunTest(formals_and_body, args, value1, value2) { + // A null listener. It isn't important what the listener does. + function listener(event, exec_state, event_data, data) { + } + + // Create the generator function outside a debugging context. It will probably + // be lazily compiled. + var gen = (function*(){}).constructor.apply(null, formals_and_body); + + // Instantiate the generator object. + var obj = gen.apply(null, args); + + // Advance to the first yield. + assertIteratorResult(value1, false, obj.next()); + + // Add a breakpoint on line 3 (the second yield). + var bp = Debug.setBreakPoint(gen, 3); + + // Enable the debugger, which should force recompilation of the generator + // function and relocation of the suspended generator activation. + Debug.setListener(listener); + + // Check that the generator resumes and suspends properly. + assertIteratorResult(value2, false, obj.next()); + + // Disable debugger -- should not force recompilation. + Debug.clearBreakPoint(bp); + Debug.setListener(null); + + // Run to completion. 
+ assertIteratorResult(undefined, true, obj.next()); +} + +function prog(a, b, c) { + return a + ';\n' + 'yield ' + b + ';\n' + 'yield ' + c; +} + +// Simple empty local scope. +RunTest([prog('', '1', '2')], [], 1, 2); + +RunTest([prog('for (;;) break', '1', '2')], [], 1, 2); + +RunTest([prog('while (0) foo()', '1', '2')], [], 1, 2); + +RunTest(['a', prog('var x = 3', 'a', 'x')], [1], 1, 3); + +RunTest(['a', prog('', '1', '2')], [42], 1, 2); + +RunTest(['a', prog('for (;;) break', '1', '2')], [42], 1, 2); diff --git a/deps/v8/test/mjsunit/harmony/regress/regress-3280.js b/deps/v8/test/mjsunit/harmony/regress/regress-3280.js new file mode 100644 index 000000000..2fc72cc86 --- /dev/null +++ b/deps/v8/test/mjsunit/harmony/regress/regress-3280.js @@ -0,0 +1,25 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-generators --expose-debug-as debug + +var Debug = debug.Debug; + +var listener_called; + +function listener(event, exec_state, event_data, data) { + if (event == Debug.DebugEvent.Break) { + listener_called = true; + exec_state.frame().allScopes(); + } +} + +Debug.setListener(listener); + +function *generator_local_2(a) { + debugger; +} +generator_local_2(1).next(); + +assertTrue(listener_called, "listener not called"); diff --git a/deps/v8/test/mjsunit/harmony/string-contains.js b/deps/v8/test/mjsunit/harmony/string-contains.js index 700a6ed6b..b853ed99f 100644 --- a/deps/v8/test/mjsunit/harmony/string-contains.js +++ b/deps/v8/test/mjsunit/harmony/string-contains.js @@ -77,8 +77,6 @@ var TEST_INPUT = [{ }, { msg: "Boolean false", val: false }, { - msg: "Regular expression /\d+/", val: /\d+/ -}, { msg: "Empty array []", val: [] }, { msg: "Empty object {}", val: {} @@ -126,7 +124,7 @@ assertTrue("abc".contains("ab", NaN)); assertFalse("abc".contains("cd", NaN)); assertFalse("xyzzy".contains("zy\0", 2)); -var dots = Array(10000).join('.'); +var dots = Array(10000).join("."); assertFalse(dots.contains("\x01", 10000)); assertFalse(dots.contains("\0", 10000)); @@ -149,3 +147,20 @@ myobj = { }, contains: String.prototype.contains }; + +assertEquals("foo[a-z]+(bar)?".contains("[a-z]+"), true); +assertThrows("'foo[a-z]+(bar)?'.contains(/[a-z]+/)", TypeError); +assertThrows("'foo/[a-z]+/(bar)?'.contains(/[a-z]+/)", TypeError); +assertEquals("foo[a-z]+(bar)?".contains("(bar)?"), true); +assertThrows("'foo[a-z]+(bar)?'.contains(/(bar)?/)", TypeError); +assertThrows("'foo[a-z]+/(bar)?/'.contains(/(bar)?/)", TypeError); + +assertThrows("String.prototype.contains.call({ 'toString': function() { " + + "throw RangeError(); } }, /./)", RangeError); +assertThrows("String.prototype.contains.call({ 'toString': function() { " + + "return 'abc'; } }, /./)", TypeError); + +assertThrows("String.prototype.contains.apply({ 'toString': function() { " + + "throw RangeError(); } }, [/./])", RangeError); +assertThrows("String.prototype.contains.apply({ 'toString': function() { " + + "return 'abc'; } }, [/./])", TypeError); diff --git a/deps/v8/test/mjsunit/harmony/typedarrays.js b/deps/v8/test/mjsunit/harmony/typedarrays.js index e20fbade9..f26b0be56 100644 --- a/deps/v8/test/mjsunit/harmony/typedarrays.js +++ b/deps/v8/test/mjsunit/harmony/typedarrays.js @@ -310,6 +310,11 @@ function TestSubArray(constructor, item) { SubarrayTestCase(constructor, item, 10,90, 100, 90); SubarrayTestCase(constructor, item, 10,90, 100, -10); + + var method = constructor.prototype.subarray; + 
method.call(new constructor(100), 0, 100); + var o = {}; + assertThrows(function() { method.call(o, 0, 100); }, TypeError); } TestSubArray(Uint8Array, 0xFF); diff --git a/deps/v8/test/mjsunit/invalid-lhs.js b/deps/v8/test/mjsunit/invalid-lhs.js index 92f5c6ff7..52ee89582 100644 --- a/deps/v8/test/mjsunit/invalid-lhs.js +++ b/deps/v8/test/mjsunit/invalid-lhs.js @@ -32,27 +32,34 @@ assertThrows("12 = 12", ReferenceError); assertThrows("x++ = 12", ReferenceError); assertThrows("eval('var x') = 12", ReferenceError); -assertThrows("if (false) eval('var x') = 12", ReferenceError); +assertThrows("if (false) 12 = 12", ReferenceError); +assertDoesNotThrow("if (false) eval('var x') = 12", ReferenceError); // Pre- and post-fix operations: assertThrows("12++", ReferenceError); assertThrows("12--", ReferenceError); -assertThrows("--12", ReferenceError); assertThrows("++12", ReferenceError); +assertThrows("--12", ReferenceError); assertThrows("++(eval('12'))", ReferenceError); assertThrows("(eval('12'))++", ReferenceError); -assertThrows("if (false) ++(eval('12'))", ReferenceError); -assertThrows("if (false) (eval('12'))++", ReferenceError); +assertThrows("if (false) 12++", ReferenceError); +assertThrows("if (false) 12--", ReferenceError); +assertThrows("if (false) ++12", ReferenceError); +assertThrows("if (false) --12", ReferenceError); +assertDoesNotThrow("if (false) ++(eval('12'))", ReferenceError); +assertDoesNotThrow("if (false) (eval('12'))++", ReferenceError); // For in: assertThrows("for (12 in [1]) print(12);", ReferenceError); assertThrows("for (eval('var x') in [1]) print(12);", ReferenceError); -assertThrows("if (false) for (eval('0') in [1]) print(12);", ReferenceError); +assertThrows("if (false) for (12 in [1]) print(12);", ReferenceError); +assertDoesNotThrow("if (false) for (eval('0') in [1]) print(12);", ReferenceError); // For: assertThrows("for (12 = 1;;) print(12);", ReferenceError); assertThrows("for (eval('var x') = 1;;) print(12);", ReferenceError); -assertThrows("if (false) for (eval('var x') = 1;;) print(12);", ReferenceError); +assertThrows("if (false) for (12 = 1;;) print(12);", ReferenceError); +assertDoesNotThrow("if (false) for (eval('var x') = 1;;) print(12);", ReferenceError); // Assignments to 'this'. assertThrows("this = 42", ReferenceError); @@ -63,3 +70,5 @@ assertThrows("this++", ReferenceError); assertThrows("++this", ReferenceError); assertThrows("this--", ReferenceError); assertThrows("--this", ReferenceError); +assertThrows("if (false) this = 42", ReferenceError); +assertThrows("if (false) this++", ReferenceError); diff --git a/deps/v8/test/mjsunit/math-abs.js b/deps/v8/test/mjsunit/math-abs.js index d6ee3f2da..09b9c88f7 100644 --- a/deps/v8/test/mjsunit/math-abs.js +++ b/deps/v8/test/mjsunit/math-abs.js @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --max-new-space-size=256 --allow-natives-syntax +// Flags: --max-new-space-size=2 --allow-natives-syntax function zero() { var x = 0.5; diff --git a/deps/v8/test/mjsunit/math-floor-part1.js b/deps/v8/test/mjsunit/math-floor-part1.js index b57b3e20d..bae47dc3c 100644 --- a/deps/v8/test/mjsunit/math-floor-part1.js +++ b/deps/v8/test/mjsunit/math-floor-part1.js @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
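// Note on the invalid-lhs.js expectations above (restating the diff, nothing new): an
// assignment target that can never be a reference, such as a numeric literal, is
// rejected while the code is being compiled, so it throws even inside a dead
// `if (false)` branch, while a call expression such as eval('var x') on the left-hand
// side is only rejected at runtime and therefore no longer throws when never executed.
var earlyError = false;
try { eval('if (false) 12 = 12'); } catch (e) { earlyError = e instanceof ReferenceError; }
// earlyError is true at this revision; current engines typically report a SyntaxError
// for the same construct instead.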
-// Flags: --max-new-space-size=128 --allow-natives-syntax +// Flags: --max-new-space-size=2 --allow-natives-syntax var test_id = 0; @@ -37,6 +37,15 @@ function testFloor(expect, input) { assertEquals(expect, test(input)); %OptimizeFunctionOnNextCall(test); assertEquals(expect, test(input)); + + var test_double_output = new Function( + 'n', + '"' + (test_id++) + '";return Math.floor(n) + -0.0'); + assertEquals(expect, test_double_output(input)); + assertEquals(expect, test_double_output(input)); + assertEquals(expect, test_double_output(input)); + %OptimizeFunctionOnNextCall(test_double_output); + assertEquals(expect, test_double_output(input)); } function zero() { diff --git a/deps/v8/test/mjsunit/math-floor-part2.js b/deps/v8/test/mjsunit/math-floor-part2.js index b6d51b2bd..ad60fba45 100644 --- a/deps/v8/test/mjsunit/math-floor-part2.js +++ b/deps/v8/test/mjsunit/math-floor-part2.js @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --max-new-space-size=256 --allow-natives-syntax +// Flags: --max-new-space-size=2 --allow-natives-syntax var test_id = 0; @@ -37,6 +37,15 @@ function testFloor(expect, input) { assertEquals(expect, test(input)); %OptimizeFunctionOnNextCall(test); assertEquals(expect, test(input)); + + var test_double_output = new Function( + 'n', + '"' + (test_id++) + '";return Math.floor(n) + -0.0'); + assertEquals(expect, test_double_output(input)); + assertEquals(expect, test_double_output(input)); + assertEquals(expect, test_double_output(input)); + %OptimizeFunctionOnNextCall(test_double_output); + assertEquals(expect, test_double_output(input)); } function zero() { diff --git a/deps/v8/test/mjsunit/math-floor-part3.js b/deps/v8/test/mjsunit/math-floor-part3.js index db2592343..a6d1c5e85 100644 --- a/deps/v8/test/mjsunit/math-floor-part3.js +++ b/deps/v8/test/mjsunit/math-floor-part3.js @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --max-new-space-size=256 --allow-natives-syntax +// Flags: --max-new-space-size=2 --allow-natives-syntax var test_id = 0; @@ -37,6 +37,15 @@ function testFloor(expect, input) { assertEquals(expect, test(input)); %OptimizeFunctionOnNextCall(test); assertEquals(expect, test(input)); + + var test_double_output = new Function( + 'n', + '"' + (test_id++) + '";return Math.floor(n) + -0.0'); + assertEquals(expect, test_double_output(input)); + assertEquals(expect, test_double_output(input)); + assertEquals(expect, test_double_output(input)); + %OptimizeFunctionOnNextCall(test_double_output); + assertEquals(expect, test_double_output(input)); } function zero() { diff --git a/deps/v8/test/mjsunit/math-floor-part4.js b/deps/v8/test/mjsunit/math-floor-part4.js index c63362308..58212b4c5 100644 --- a/deps/v8/test/mjsunit/math-floor-part4.js +++ b/deps/v8/test/mjsunit/math-floor-part4.js @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
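// Note on the testFloor() additions above (and the matching testRound() change further
// down), restated as a sketch: the unique leading string literal gives each generated
// helper a distinct source, so previously compiled or optimized code is not reused
// between test ids, and appending "+ -0.0" forces the result into a double
// representation, exercising the double-output path of the optimized Math.floor code.
// The arithmetic itself is unchanged:
var floorAsDouble = new Function('n', '"some-unique-id"; return Math.floor(n) + -0.0');
// floorAsDouble(1.5) === 1 and floorAsDouble(-1.5) === -2, exactly like Math.floor(n)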
-// Flags: --max-new-space-size=256 --allow-natives-syntax +// Flags: --max-new-space-size=2 --allow-natives-syntax var test_id = 0; @@ -37,6 +37,15 @@ function testFloor(expect, input) { assertEquals(expect, test(input)); %OptimizeFunctionOnNextCall(test); assertEquals(expect, test(input)); + + var test_double_output = new Function( + 'n', + '"' + (test_id++) + '";return Math.floor(n) + -0.0'); + assertEquals(expect, test_double_output(input)); + assertEquals(expect, test_double_output(input)); + assertEquals(expect, test_double_output(input)); + %OptimizeFunctionOnNextCall(test_double_output); + assertEquals(expect, test_double_output(input)); } function zero() { diff --git a/deps/v8/test/mjsunit/math-round.js b/deps/v8/test/mjsunit/math-round.js index bf5906920..12a92657a 100644 --- a/deps/v8/test/mjsunit/math-round.js +++ b/deps/v8/test/mjsunit/math-round.js @@ -38,6 +38,16 @@ function testRound(expect, input) { assertEquals(expect, doRound(input)); %OptimizeFunctionOnNextCall(doRound); assertEquals(expect, doRound(input)); + + // Force the Math.round() representation to double to exercise the associated + // optimized code. + var doRoundToDouble = new Function('input', + '"' + (test_id++) + '";return Math.round(input) + -0.0'); + assertEquals(expect, doRoundToDouble(input)); + assertEquals(expect, doRoundToDouble(input)); + assertEquals(expect, doRoundToDouble(input)); + %OptimizeFunctionOnNextCall(doRoundToDouble); + assertEquals(expect, doRoundToDouble(input)); } testRound(0, 0); diff --git a/deps/v8/test/mjsunit/mjsunit.status b/deps/v8/test/mjsunit/mjsunit.status index 3283070c6..117a0e6f7 100644 --- a/deps/v8/test/mjsunit/mjsunit.status +++ b/deps/v8/test/mjsunit/mjsunit.status @@ -68,17 +68,13 @@ 'd8-performance-now': [PASS, NO_VARIANTS], ############################################################################## - # These use a built-in that's only present in debug mode. They take - # too long to run in debug mode on ARM and MIPS. - 'fuzz-natives-part*': [PASS, ['mode == release or arch == arm or arch == android_arm or arch == android_arm64 or arch == mipsel', SKIP]], - 'big-object-literal': [PASS, ['arch == arm or arch == android_arm or arch == android_arm64', SKIP]], # Issue 488: this test sometimes times out. 'array-constructor': [PASS, TIMEOUT], # Very slow on ARM and MIPS, contains no architecture dependent code. - 'unicode-case-overoptimization': [PASS, NO_VARIANTS, ['arch == arm or arch == android_arm or arch == android_arm64 or arch == mipsel', TIMEOUT]], + 'unicode-case-overoptimization': [PASS, NO_VARIANTS, ['arch == arm or arch == android_arm or arch == android_arm64 or arch == mipsel or arch == mips', TIMEOUT]], ############################################################################## # This test expects to reach a certain recursion depth, which may not work @@ -93,7 +89,11 @@ # This test sets the umask on a per-process basis and hence cannot be # used in multi-threaded runs. # On android there is no /tmp directory. - 'd8-os': [PASS, ['isolates or arch == android_arm or arch == android_arm64 or arch == android_ia32', SKIP]], + # Currently d8-os generates a temporary directory name using Math.random(), so + # we cannot run several variants of d8-os simultaneously, since all of them + # get the same random seed and would generate the same directory name. Besides + # that, it doesn't make sense to run several variants of d8-os anyways. 
+ 'd8-os': [PASS, NO_VARIANTS, ['isolates or arch == android_arm or arch == android_arm64 or arch == android_ia32', SKIP]], 'tools/tickprocessor': [PASS, ['arch == android_arm or arch == android_arm64 or arch == android_ia32', SKIP]], ############################################################################## @@ -118,6 +118,11 @@ # BUG(v8:2989). PASS/FAIL on linux32 because crankshaft is turned off for # nosse2. Also for arm novfp3. 'regress/regress-2989': [FAIL, NO_VARIANTS, ['system == linux and arch == ia32 or arch == arm and simulator == True', PASS]], + + # Skip endain dependent test for mips due to different typed views of the same + # array buffer. + 'nans': [PASS, ['arch == mips', SKIP]], + }], # ALWAYS ############################################################################## @@ -288,7 +293,7 @@ }], # 'arch == arm or arch == android_arm' ############################################################################## -['arch == mipsel', { +['arch == mipsel or arch == mips', { # Slow tests which times out in debug mode. 'try': [PASS, ['mode == debug', SKIP]], @@ -324,7 +329,7 @@ # Currently always deopt on minus zero 'math-floor-of-div-minus-zero': [SKIP], -}], # 'arch == mipsel' +}], # 'arch == mipsel or arch == mips' ############################################################################## # Native Client uses the ARM simulator so will behave similarly to arm @@ -342,11 +347,6 @@ 'debug-liveedit-restart-frame': [SKIP], 'debug-liveedit-double-call': [SKIP], - # This test dumps core for arm.debug, so no reason to expect it to work - # for NaCl. The other three fuzz-natives tests seem to run fine. - # As noted above none of them are run in the arm.debug case. - 'fuzz-natives-part4': [SKIP], - # NaCl builds have problems with this test since Pepper_28. # V8 Issue 2786 'math-exp-precision': [SKIP], @@ -367,6 +367,10 @@ # Lead to OOM: 'string-oom-*': [SKIP], + + # Crashes. + 'harmony/private': [SKIP], + 'harmony/symbols': [SKIP], }], # 'arch == nacl_ia32 or arch == nacl_x64' ############################################################################## diff --git a/deps/v8/test/mjsunit/object-freeze.js b/deps/v8/test/mjsunit/object-freeze.js index 3b7987402..4144936d0 100644 --- a/deps/v8/test/mjsunit/object-freeze.js +++ b/deps/v8/test/mjsunit/object-freeze.js @@ -322,13 +322,15 @@ Object.freeze(obj); // sufficient. assertTrue(Object.isSealed(obj)); -assertDoesNotThrow(function() { obj.push(); }); -assertDoesNotThrow(function() { obj.unshift(); }); -assertDoesNotThrow(function() { obj.splice(0,0); }); +// Verify that the length can't be written by builtins. +assertThrows(function() { obj.push(); }, TypeError); +assertThrows(function() { obj.unshift(); }, TypeError); +assertThrows(function() { obj.splice(0,0); }, TypeError); assertTrue(Object.isFrozen(obj)); // Verify that an item can't be changed with splice. assertThrows(function() { obj.splice(0,1,1); }, TypeError); +assertTrue(Object.isFrozen(obj)); // Verify that unshift() with no arguments will fail if it reifies from // the prototype into the object. diff --git a/deps/v8/test/mjsunit/readonly-accessor.js b/deps/v8/test/mjsunit/readonly-accessor.js new file mode 100644 index 000000000..5a73525fe --- /dev/null +++ b/deps/v8/test/mjsunit/readonly-accessor.js @@ -0,0 +1,7 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
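// Note on the object-freeze.js change above: push(), unshift() and splice() always
// write the "length" property, even when called with no elements, so on a frozen array
// they now throw instead of silently doing nothing. Quick illustration:
var frozenArr = Object.freeze([1, 2, 3]);
// frozenArr.push();        // TypeError: length is not writable
// frozenArr.splice(0, 0);  // TypeError for the same reason
// frozenArr[0] === 1       // reads are unaffected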
+ +var foo = {}; +foo.__proto__ = new String("bar"); +foo.length = 20; diff --git a/deps/v8/test/mjsunit/regress/regress-1548.js b/deps/v8/test/mjsunit/regress/regress-1548.js index 074007b91..5330e131e 100644 --- a/deps/v8/test/mjsunit/regress/regress-1548.js +++ b/deps/v8/test/mjsunit/regress/regress-1548.js @@ -30,19 +30,19 @@ function testfn(f) { return [1].map(f)[0]; } function foo() { return [].map.caller; } -assertEquals(null, testfn(foo)); +assertThrows(function() { testfn(foo); } ); // Try to delete the caller property (to make sure that we can't get to the // caller accessor on the prototype. delete Array.prototype.map.caller; -assertEquals(null, testfn(foo)); +assertThrows(function() { testfn(foo); } ); // Redo tests with arguments object. function testarguments(f) { return [1].map(f)[0]; } function bar() { return [].map.arguments; } -assertEquals(null, testfn(bar)); +assertThrows(function() { testarguments(bar); } ); // Try to delete the arguments property (to make sure that we can't get to the // caller accessor on the prototype. delete Array.prototype.map.arguments; -assertEquals(null, testarguments(bar)); +assertThrows(function() { testarguments(bar); } ); diff --git a/deps/v8/test/mjsunit/regress/regress-1708.js b/deps/v8/test/mjsunit/regress/regress-1708.js index ab50e0786..48ee79c77 100644 --- a/deps/v8/test/mjsunit/regress/regress-1708.js +++ b/deps/v8/test/mjsunit/regress/regress-1708.js @@ -27,14 +27,18 @@ // Regression test of a very rare corner case where left-trimming an // array caused invalid marking bit patterns on lazily swept pages. +// +// Lazy sweeping was deprecated. We are keeping the test case to make +// sure that concurrent sweeping, which relies on similar assumptions +// as lazy sweeping works correctly. -// Flags: --expose-gc --noincremental-marking --max-new-space-size 1000 +// Flags: --expose-gc --noincremental-marking --max-new-space-size=2 (function() { var head = new Array(1); var tail = head; - // Fill heap to increase old-space size and trigger lazy sweeping on + // Fill heap to increase old-space size and trigger concurrent sweeping on // some of the old-space pages. for (var i = 0; i < 200; i++) { tail[1] = new Array(1000); @@ -44,7 +48,7 @@ gc(); gc(); // At this point "array" should have been promoted to old-space and be - // located in a lazy swept page with intact marking bits. Now shift + // located in a concurrently swept page with intact marking bits. Now shift // the array to trigger left-trimming operations. assertEquals(100, array.length); for (var i = 0; i < 50; i++) { @@ -54,7 +58,7 @@ // At this point "array" should have been trimmed from the left with // marking bits being correctly transfered to the new object start. - // Scavenging operations cause lazy sweeping to advance and verify + // Scavenging operations cause concurrent sweeping to advance and verify // that marking bit patterns are still sane. 
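// Note on the regress-1548.js update above: reading 'caller' or 'arguments' on a
// builtin or strict-mode function now throws a TypeError from the poisoned accessor
// instead of returning null. The same behaviour outside the mjsunit harness (helper
// name is illustrative):
function probeCaller(fn) {
  try { return fn.caller; } catch (e) { return e instanceof TypeError ? 'TypeError' : e; }
}
// probeCaller([].map) === 'TypeError' at this revision; a sloppy-mode user-defined
// function still exposes a (possibly null) caller instead.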
for (var i = 0; i < 200; i++) { tail[1] = new Array(1000); diff --git a/deps/v8/test/mjsunit/regress/regress-2419.js b/deps/v8/test/mjsunit/regress/regress-2419.js index 4ffafbe6e..612e6dbfd 100644 --- a/deps/v8/test/mjsunit/regress/regress-2419.js +++ b/deps/v8/test/mjsunit/regress/regress-2419.js @@ -27,10 +27,10 @@ var a = [5, 4, 3, 2, 1, 0]; Object.freeze(a); -a.sort(); +assertThrows(function() { a.sort(); }); assertArrayEquals([5, 4, 3, 2, 1, 0], a); var b = {0: 5, 1: 4, 2: 3, 3: 2, 4: 1, 5: 0, length: 6}; Object.freeze(b); -Array.prototype.sort.call(b); +assertThrows(function() { Array.prototype.sort.call(b); }); assertPropertiesEqual({0: 5, 1: 4, 2: 3, 3: 2, 4: 1, 5: 0, length: 6}, b); diff --git a/deps/v8/test/mjsunit/regress/regress-3204.js b/deps/v8/test/mjsunit/regress/regress-3204.js index dc754ff2d..b3161be49 100644 --- a/deps/v8/test/mjsunit/regress/regress-3204.js +++ b/deps/v8/test/mjsunit/regress/regress-3204.js @@ -4,6 +4,21 @@ // Flags: --allow-natives-syntax +// ----------------------------------------------------------------------------- + +function SmiTaggingCanOverflow(x) { + x = x | 0; + if (x == 0) return; + return x; +} + +SmiTaggingCanOverflow(2147483647); +SmiTaggingCanOverflow(2147483647); +%OptimizeFunctionOnNextCall(SmiTaggingCanOverflow); +assertEquals(2147483647, SmiTaggingCanOverflow(2147483647)); + +// ----------------------------------------------------------------------------- + function ModILeftCanBeNegative() { var x = 0; for (var i = -1; i < 0; ++i) x = i % 2; @@ -14,6 +29,8 @@ ModILeftCanBeNegative(); %OptimizeFunctionOnNextCall(ModILeftCanBeNegative); assertEquals(-1, ModILeftCanBeNegative()); +// ----------------------------------------------------------------------------- + function ModIRightCanBeZero() { var x = 0; for (var i = -1; i <= 0; ++i) x = (2 % i) | 0; diff --git a/deps/v8/test/mjsunit/regress/regress-3255.js b/deps/v8/test/mjsunit/regress/regress-3255.js new file mode 100644 index 000000000..0e7743537 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-3255.js @@ -0,0 +1,19 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --enable-slow-asserts + +var arr = []; +var str = new String('x'); + +function f(a,b) { + a[b] = 1; +} + +f(arr, 0); +f(str, 0); +f(str, 0); + +// This is just to trigger elements validation, object already broken. +%SetProperty(str, 1, 'y', 0); diff --git a/deps/v8/test/mjsunit/regress/regress-3281.js b/deps/v8/test/mjsunit/regress/regress-3281.js new file mode 100644 index 000000000..ebb25991d --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-3281.js @@ -0,0 +1,13 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --harmony-collections + +// Should not crash or raise an exception. + +var s = new Set(); +var setIterator = %SetCreateIterator(s, 2); + +var m = new Map(); +var mapIterator = %MapCreateIterator(m, 2); diff --git a/deps/v8/test/mjsunit/regress/regress-3294.js b/deps/v8/test/mjsunit/regress/regress-3294.js new file mode 100644 index 000000000..400e6b688 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-3294.js @@ -0,0 +1,8 @@ +// Copyright 2014 the V8 project authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +var e = new Error('message'); +var keys = Object.keys(e); +e.stack; +assertEquals(keys, Object.keys(e)); diff --git a/deps/v8/test/mjsunit/regress/regress-353058.js b/deps/v8/test/mjsunit/regress/regress-353058.js new file mode 100644 index 000000000..0fef24624 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-353058.js @@ -0,0 +1,27 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --stack-size=150 +// Requries ASAN. + +function runNearStackLimit(f) { function t() { try { t(); } catch(e) { f(); } }; try { t(); } catch(e) {} } +function __f_0( + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x +) { } +runNearStackLimit(__f_0); diff --git a/deps/v8/test/mjsunit/regress/regress-355486.js b/deps/v8/test/mjsunit/regress/regress-355486.js new file mode 100644 index 000000000..55362a134 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-355486.js @@ -0,0 +1,13 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function f() { var v = arguments[0]; } +function g() { f(); } + +g(); +g(); +%OptimizeFunctionOnNextCall(g); +g(); diff --git a/deps/v8/test/mjsunit/regress/regress-357054.js b/deps/v8/test/mjsunit/regress/regress-357054.js new file mode 100644 index 000000000..92a066edc --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-357054.js @@ -0,0 +1,10 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +[].__defineSetter__(0, function() { }); +function f(a,i,v) { a[i] = v; } +a = [0,0,0]; +f(a,0,5); +a = new Float32Array(5); +f(a,2,5.5); diff --git a/deps/v8/test/mjsunit/regress/regress-357103.js b/deps/v8/test/mjsunit/regress/regress-357103.js new file mode 100644 index 000000000..692729ddb --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-357103.js @@ -0,0 +1,14 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +// Flags: --allow-natives-syntax + +%SetFlags("--gc-interval=1"); + +var key = "Huckleberry Finn" + "Tom Sawyer"; +var o = {}; +function f() { o[key] = "Adventures"; } + +f(); +f(); diff --git a/deps/v8/test/mjsunit/regress/regress-357105.js b/deps/v8/test/mjsunit/regress/regress-357105.js new file mode 100644 index 000000000..d3eefd0f1 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-357105.js @@ -0,0 +1,23 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --expose-gc + +var global = { }; + +function do_nothing() { } + +function f(opt_gc) { + var x = new Array(3); + x[0] = 10; + opt_gc(); + global[1] = 15.5; + return x; +} + +gc(); +global = f(gc); +global = f(do_nothing); +%OptimizeFunctionOnNextCall(f); +global = f(do_nothing); diff --git a/deps/v8/test/mjsunit/regress/regress-358057.js b/deps/v8/test/mjsunit/regress/regress-358057.js new file mode 100644 index 000000000..c5fe73a03 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-358057.js @@ -0,0 +1,19 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax +__v_0 = new Uint8ClampedArray(10); +for (var i = 0; i < 10; i++) { + __v_0[i] = 0xAA; +} +function __f_12(__v_6) { + if (__v_6 < 0) { + __v_1 = __v_0[__v_6 + 10]; + return __v_1; + } +} + +assertEquals(0xAA, __f_12(-1)); +%OptimizeFunctionOnNextCall(__f_12); +assertEquals(0xAA, __f_12(-1)); diff --git a/deps/v8/test/mjsunit/regress/regress-358059.js b/deps/v8/test/mjsunit/regress/regress-358059.js new file mode 100644 index 000000000..30738f9ae --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-358059.js @@ -0,0 +1,13 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function f(a, b) { return b + (a.x++); } +var o = {}; +o.__defineGetter__('x', function() { return 1; }); +assertEquals(4, f(o, 3)); +assertEquals(4, f(o, 3)); +%OptimizeFunctionOnNextCall(f); +assertEquals(4, f(o, 3)); diff --git a/deps/v8/test/mjsunit/regress/regress-358088.js b/deps/v8/test/mjsunit/regress/regress-358088.js new file mode 100644 index 000000000..222bba60d --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-358088.js @@ -0,0 +1,18 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +function f(a) { + a[a.length] = 1; +} + +function g(a, i, v) { + a[i] = v; +} + +f([]); // f KeyedStoreIC goes to 1.GROW +o = {}; +g(o); // We've added property "undefined" to o + +o = {}; // A transition on property "undefined" exists from {} +f(o); // Store should go generic. diff --git a/deps/v8/test/mjsunit/regress/regress-358090.js b/deps/v8/test/mjsunit/regress/regress-358090.js new file mode 100644 index 000000000..d9c07e857 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-358090.js @@ -0,0 +1,8 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
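The comment trail in regress-358088 only makes sense once you notice that an absent index argument is stringified by the store, so the call g(o) defines a property literally named "undefined". A tiny sketch of that coercion, with no IC machinery or natives syntax involved (g mirrors the test's helper):

function g(a, i, v) { a[i] = v; }   // same shape as the test's helper

var o = {};
g(o);                               // i === undefined, v === undefined
console.log(Object.keys(o));        // ["undefined"]
console.log("undefined" in o);      // true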
+ +var x = Array(100000); +y = Array.apply(Array, x); +y.unshift(4); +y.shift(); diff --git a/deps/v8/test/mjsunit/regress/regress-359441.js b/deps/v8/test/mjsunit/regress/regress-359441.js new file mode 100644 index 000000000..d96468c33 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-359441.js @@ -0,0 +1,23 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function g() { + this.x = {}; +} + +function f() { + new g(); +} + +function deopt(x) { + %DeoptimizeFunction(f); +} + +f(); +f(); +%OptimizeFunctionOnNextCall(f); +Object.prototype.__defineSetter__('x', deopt); +f(); diff --git a/deps/v8/test/mjsunit/regress/regress-359491.js b/deps/v8/test/mjsunit/regress/regress-359491.js new file mode 100644 index 000000000..d72875a29 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-359491.js @@ -0,0 +1,61 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +(function () { + function f(a, b, mode) { + if (mode) { + return a === b; + } else { + return a === b; + } + } + + // Gather type feedback for both branches. + f("a", "b", 1); + f("c", "d", 1); + f("a", "b", 0); + f("c", "d", 0); + + function g(mode) { + var x = 1e10 | 0; + f(x, x, mode); + } + + // Gather type feedback for g, but only on one branch for f. + g(1); + g(1); + %OptimizeFunctionOnNextCall(g); + // Optimize g, which inlines f. Both branches in f will see the constant. + g(0); +})(); + +(function () { + function f(a, b, mode) { + if (mode) { + return a === b; + } else { + return a === b; + } + } + + // Gather type feedback for both branches. + f({ a : 1}, {b : 1}, 1); + f({ c : 1}, {d : 1}, 1); + f({ a : 1}, {c : 1}, 0); + f({ b : 1}, {d : 1}, 0); + + function g(mode) { + var x = 1e10 | 0; + f(x, x, mode); + } + + // Gather type feedback for g, but only on one branch for f. + g(1); + g(1); + %OptimizeFunctionOnNextCall(g); + // Optimize g, which inlines f. Both branches in f will see the constant. + g(0); +})(); diff --git a/deps/v8/test/mjsunit/greedy.js b/deps/v8/test/mjsunit/regress/regress-359525.js index 8c49e41b9..6a82a38fd 100644 --- a/deps/v8/test/mjsunit/greedy.js +++ b/deps/v8/test/mjsunit/regress/regress-359525.js @@ -1,4 +1,4 @@ -// Copyright 2008 the V8 project authors. All rights reserved. +// Copyright 2014 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -25,36 +25,20 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
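regress-359441 leans on a detail that is easy to miss: installing a setter for 'x' on Object.prototype makes the plain store this.x = {} inside a constructor invoke that setter instead of defining an own property. The observable half of that, without the deopt plumbing; the names G and intercepted are illustrative, not from the test:

var intercepted = 0;
function G() { this.x = {}; }                 // ordinary own-property store...

Object.defineProperty(Object.prototype, "x", {
  set: function (v) { intercepted++; },       // ...diverted by a proto setter
  configurable: true
});

var obj = new G();
console.log(intercepted);                     // 1
console.log(obj.hasOwnProperty("x"));         // false - nothing was defined
delete Object.prototype.x;                    // avoid leaking the setter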
-// Flags: --gc-greedy --noverify-heap - -function IterativeFib(n) { - var f0 = 0, f1 = 1; - for (; n > 0; --n) { - var f2 = f0 + f1; - f0 = f1; f1 = f2; - } - return f0; -} - -function RecursiveFib(n) { - if (n <= 1) return n; - return RecursiveFib(n - 1) + RecursiveFib(n - 2); +// Test BinaryOpICStub substract +var a; +for (var i = 0; i < 2; i++) { + var x = 42 + a - {}; + print(x); + a = ""; } -function Check(n, expected) { - var i = IterativeFib(n); - var r = RecursiveFib(n); - assertEquals(i, expected); - assertEquals(r, expected); +// Test BinaryOpICStub add +var b = 1.4; +var val = 0; +var o = {valueOf:function() { val++; return 10; }}; +for (var i = 0; i < 2; i++) { + var x = (b + i) + o; + b = ""; } - -Check(0, 0); -Check(1, 1); -Check(2, 1); -Check(3, 1 + 1); -Check(4, 2 + 1); -Check(5, 3 + 2); -Check(10, 55); -Check(15, 610); -Check(20, 6765); -assertEquals(IterativeFib(75), 2111485077978050); +assertEquals(val, 2); diff --git a/deps/v8/test/mjsunit/regress/regress-360733.js b/deps/v8/test/mjsunit/regress/regress-360733.js new file mode 100644 index 000000000..28f73ea44 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-360733.js @@ -0,0 +1,14 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --stack_size=150 + +function f(a) { + f(a + 1); +} + +Error.__defineGetter__('stackTraceLimit', function() { }); +try { + f(0); +} catch (e) { } diff --git a/deps/v8/test/mjsunit/regress/regress-361025.js b/deps/v8/test/mjsunit/regress/regress-361025.js new file mode 100644 index 000000000..74f50d86e --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-361025.js @@ -0,0 +1,10 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --expose-gc + +var x = new Object(); +x.__defineGetter__('a', function() { return 7 }); +JSON.parse('{"a":2600753951}'); +gc(); diff --git a/deps/v8/test/mjsunit/regress/regress-361608.js b/deps/v8/test/mjsunit/regress/regress-361608.js new file mode 100644 index 000000000..b3cc90cfd --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-361608.js @@ -0,0 +1,20 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function f() {}; +int_array = [1]; + +function foo() { + var x; + for (var i = -1; i < 0; i++) { + x = int_array[i + 1]; + f(function() { x = i; }); + } +} + +foo(); +%OptimizeFunctionOnNextCall(foo); +foo(); diff --git a/deps/v8/test/mjsunit/regress/regress-362128.js b/deps/v8/test/mjsunit/regress/regress-362128.js new file mode 100644 index 000000000..18ac5db90 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-362128.js @@ -0,0 +1,37 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
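regress-359525 counts how often valueOf runs so it can tell whether the add stub still goes through ToPrimitive after its operand feedback changes shape. The plain-JavaScript part of that contract looks like this (val and o mirror the test's names; console.log replaces assertEquals):

var val = 0;
var o = { valueOf: function () { val++; return 10; } };

var sum    = 1.4 + o;   // number + object: converted via valueOf
var concat = ""  + o;   // string + object: valueOf still wins over toString

console.log(sum);       // 11.4
console.log(concat);    // "10"
console.log(val);       // 2 - one conversion per use of o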
+// Flags: --allow-natives-syntax + +function genM() { + "use strict"; + return function () { + return this.field; + }; +} + +function genR() { + var x = { + field: 10 + } + return x; +} + +method = {}; +receiver = {}; + +method = genM("A"); +receiver = genR("A"); + +var foo = (function () { + return function suspect (name) { + "use strict"; + return method.apply(receiver, arguments); + } +})(); + +foo("a", "b", "c"); +foo("a", "b", "c"); +foo("a", "b", "c"); +%OptimizeFunctionOnNextCall(foo); +foo("a", "b", "c"); diff --git a/deps/v8/test/mjsunit/regress/regress-362870.js b/deps/v8/test/mjsunit/regress/regress-362870.js new file mode 100644 index 000000000..c8d3fe7e4 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-362870.js @@ -0,0 +1,18 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +// Adding a property via Object.defineProperty should not be taken as hint that +// we construct a dictionary, quite the opposite. +var obj = {}; + +for (var i = 0; i < 100; i++) { + Object.defineProperty(obj, "x" + i, { value: 31415 }); + Object.defineProperty(obj, "y" + i, { + get: function() { return 42; }, + set: function(value) { } + }); + assertTrue(%HasFastProperties(obj)); +} diff --git a/deps/v8/test/mjsunit/regress/regress-363956.js b/deps/v8/test/mjsunit/regress/regress-363956.js new file mode 100644 index 000000000..76d6728c0 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-363956.js @@ -0,0 +1,12 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function Fuu() { this.x = this.x.x; } +Fuu.prototype.x = {x: 1} +new Fuu(); +new Fuu(); +%OptimizeFunctionOnNextCall(Fuu); +new Fuu(); diff --git a/deps/v8/test/mjsunit/regress/regress-365172-1.js b/deps/v8/test/mjsunit/regress/regress-365172-1.js new file mode 100644 index 000000000..ea68285ea --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-365172-1.js @@ -0,0 +1,13 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --track-field-types + +var b1 = {d: 1}; var b2 = {d: 2}; +var f1 = {x: 1}; var f2 = {x: 2}; +f1.b = b1; +f2.x = {}; +b2.d = 4.2; +f2.b = b2; +var x = f1.x; diff --git a/deps/v8/test/mjsunit/regress/regress-365172-2.js b/deps/v8/test/mjsunit/regress/regress-365172-2.js new file mode 100644 index 000000000..265901c5c --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-365172-2.js @@ -0,0 +1,13 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --track-field-types + +var b1 = {d: 1}; var b2 = {d: 2}; +var f1 = {x: 1}; var f2 = {x: 2}; +f1.b = b1; +f2.x = {}; +b2.d = 4.2; +f2.b = b2; +%TryMigrateInstance(f1); diff --git a/deps/v8/test/mjsunit/regress/regress-365172-3.js b/deps/v8/test/mjsunit/regress/regress-365172-3.js new file mode 100644 index 000000000..103d3d03b --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-365172-3.js @@ -0,0 +1,14 @@ +// Copyright 2014 the V8 project authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --expose-gc --track-field-types + +function f1(a) { return {x:a, v:''}; } +function f2(a) { return {x:{v:a}, v:''}; } +function f3(a) { return {x:[], v:{v:''}}; } +f3([0]); +a = f1(1); +a.__defineGetter__('v', function() { gc(); return f2(this); }); +a.v; +f3(1); diff --git a/deps/v8/test/mjsunit/regress/regress-369450.js b/deps/v8/test/mjsunit/regress/regress-369450.js new file mode 100644 index 000000000..e4523619f --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-369450.js @@ -0,0 +1,15 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --enable-slow-asserts + +var v = [1.3]; +v.length = 0; + +var json = JSON.stringify(v); +assertEquals("[]", json); + +Array.prototype[0] = 5.5; +var arr = [].concat(v, [{}], [2.3]); +assertEquals([{}, 2.3], arr); diff --git a/deps/v8/test/mjsunit/regress/regress-370384.js b/deps/v8/test/mjsunit/regress/regress-370384.js new file mode 100644 index 000000000..28aea6921 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-370384.js @@ -0,0 +1,16 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --deopt-every-n-times=1 --no-enable_sse4_1 + +function g(f, x, name) { + var v2 = f(x); + for (var i = 0; i < 13000; i++) { + f(i); + } + var v1 = f(x); + assertEquals(v1, v2); +} + +g(Math.sin, 6.283185307179586, "Math.sin"); diff --git a/deps/v8/test/mjsunit/regress/regress-99167.js b/deps/v8/test/mjsunit/regress/regress-99167.js index 5053ae5d2..777acf448 100644 --- a/deps/v8/test/mjsunit/regress/regress-99167.js +++ b/deps/v8/test/mjsunit/regress/regress-99167.js @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --expose-gc --max-new-space-size=1024 +// Flags: --expose-gc --max-new-space-size=2 eval("function Node() { this.a = 1; this.a = 3; }"); new Node; diff --git a/deps/v8/test/mjsunit/regress/regress-alloc-smi-check.js b/deps/v8/test/mjsunit/regress/regress-alloc-smi-check.js new file mode 100644 index 000000000..295048a13 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-alloc-smi-check.js @@ -0,0 +1,16 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// Flags: --allow-natives-syntax + +var x = {}; + +function f(a) { + a[200000000] = x; +} + +f(new Array(100000)); +f([]); +%OptimizeFunctionOnNextCall(f); +f([]); diff --git a/deps/v8/test/mjsunit/regress/regress-builtinbust-1.js b/deps/v8/test/mjsunit/regress/regress-builtinbust-1.js new file mode 100644 index 000000000..33a75634b --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-builtinbust-1.js @@ -0,0 +1,14 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +function nope() { return false; } +var a = [ 1, 2, 3 ]; +Object.seal(a); +Object.isSealed = nope; + +assertThrows(function() { a.pop(); }, TypeError); +assertThrows(function() { a.push(5); }, TypeError); +assertThrows(function() { a.shift(); }, TypeError); +assertThrows(function() { a.unshift(5); }, TypeError); +assertThrows(function() { a.splice(0, 1); }, TypeError); diff --git a/deps/v8/test/mjsunit/regress/regress-builtinbust-3.js b/deps/v8/test/mjsunit/regress/regress-builtinbust-3.js new file mode 100644 index 000000000..f5a0c39ea --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-builtinbust-3.js @@ -0,0 +1,15 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +function produce_object() { + var real_length = 1; + function set_length() { real_length = "boom"; } + function get_length() { return real_length; } + var o = { __proto__:Array.prototype , 0:"x" }; + Object.defineProperty(o, "length", { set:set_length, get:get_length }) + return o; +} + +assertEquals(2, produce_object().push("y")); +assertEquals(2, produce_object().unshift("y")); diff --git a/deps/v8/test/mjsunit/regress/regress-builtinbust-4.js b/deps/v8/test/mjsunit/regress/regress-builtinbust-4.js new file mode 100644 index 000000000..dbaa2454d --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-builtinbust-4.js @@ -0,0 +1,16 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +var o = { __proto__:Array.prototype, 0:"x" }; +function boomer() { return 0; } +Object.defineProperty(o, "length", { get:boomer, set:boomer }); +Object.seal(o); + +assertDoesNotThrow(function() { o.push(1); }); +assertEquals(0, o.length); +assertEquals(1, o[0]); + +assertDoesNotThrow(function() { o.unshift(2); }); +assertEquals(0, o.length); +assertEquals(2, o[0]); diff --git a/deps/v8/test/mjsunit/regress/regress-builtinbust-5.js b/deps/v8/test/mjsunit/regress/regress-builtinbust-5.js new file mode 100644 index 000000000..266e4d48b --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-builtinbust-5.js @@ -0,0 +1,13 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +var a = [ 1, 2, 3 ]; +var was_called = false; +function poison() { was_called = true; } +a.hasOwnProperty = poison; +Object.freeze(a); + +assertThrows("a.unshift()", TypeError); +assertEquals(3, a.length); +assertFalse(was_called); diff --git a/deps/v8/test/mjsunit/regress/regress-builtinbust-6.js b/deps/v8/test/mjsunit/regress/regress-builtinbust-6.js new file mode 100644 index 000000000..d926bd048 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-builtinbust-6.js @@ -0,0 +1,40 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Test that Array builtins can be called on primitive values. 
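The regress-builtinbust-1 case pins down that the array mutators check the receiver's own integrity level rather than calling the user-replaceable Object.isSealed, so overriding that function cannot re-enable pushes. A short sketch of the distinction, not part of the diff (nope follows the test; console.log replaces assertThrows):

var a = [1, 2, 3];
Object.seal(a);
Object.isSealed = function nope() { return false; };  // lie to any caller

var threw = false;
try {
  a.push(4);            // push checks the array itself, not Object.isSealed
} catch (e) {
  threw = e instanceof TypeError;
}
console.log(threw);     // true
console.log(a.length);  // 3 - nothing was appended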
+var values = [ 23, 4.2, true, false, 0/0 ]; +for (var i = 0; i < values.length; ++i) { + var v = values[i]; + Array.prototype.join.call(v); + Array.prototype.pop.call(v); + Array.prototype.push.call(v); + Array.prototype.reverse.call(v); + Array.prototype.shift.call(v); + Array.prototype.slice.call(v); + Array.prototype.splice.call(v); + Array.prototype.unshift.call(v); +} + +// Test that ToObject on primitive values is only called once. +var length_receiver, element_receiver; +function length() { length_receiver = this; return 2; } +function element() { element_receiver = this; return "x"; } +Object.defineProperty(Number.prototype, "length", { get:length, set:length }); +Object.defineProperty(Number.prototype, "0", { get:element, set:element }); +Object.defineProperty(Number.prototype, "1", { get:element, set:element }); +Object.defineProperty(Number.prototype, "2", { get:element, set:element }); +function test_receiver(expected, call_string) { + assertDoesNotThrow(call_string); + assertEquals(new Number(expected), length_receiver); + assertSame(length_receiver, element_receiver); +} + +test_receiver(11, "Array.prototype.join.call(11)") +test_receiver(23, "Array.prototype.pop.call(23)"); +test_receiver(42, "Array.prototype.push.call(42, 'y')"); +test_receiver(49, "Array.prototype.reverse.call(49)"); +test_receiver(65, "Array.prototype.shift.call(65)"); +test_receiver(77, "Array.prototype.slice.call(77, 1)"); +test_receiver(88, "Array.prototype.splice.call(88, 1, 1)"); +test_receiver(99, "Array.prototype.unshift.call(99, 'z')"); diff --git a/deps/v8/test/mjsunit/regress/regress-builtinbust-7.js b/deps/v8/test/mjsunit/regress/regress-builtinbust-7.js new file mode 100644 index 000000000..a7c049e90 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-builtinbust-7.js @@ -0,0 +1,32 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +if ("Intl" in this) { + function overflow() { + return overflow() + 1; + } + Object.defineProperty = overflow; + assertDoesNotThrow(function() { Intl.Collator.supportedLocalesOf("en"); }); + + var date = new Date(Date.UTC(2004, 12, 25, 3, 0, 0)); + var options = { + weekday: "long", + year: "numeric", + month: "long", + day: "numeric" + }; + + Object.apply = overflow; + assertDoesNotThrow(function() { date.toLocaleDateString("de-DE", options); }); + + var options_incomplete = {}; + assertDoesNotThrow(function() { + date.toLocaleDateString("de-DE", options_incomplete); + }); + assertTrue(options_incomplete.hasOwnProperty("year")); + + assertDoesNotThrow(function() { date.toLocaleDateString("de-DE", undefined); }); + assertDoesNotThrow(function() { date.toLocaleDateString("de-DE"); }); + assertThrows(function() { date.toLocaleDateString("de-DE", null); }, TypeError); +} diff --git a/deps/v8/test/mjsunit/regress/regress-captured-object-no-dummy-use.js b/deps/v8/test/mjsunit/regress/regress-captured-object-no-dummy-use.js new file mode 100644 index 000000000..cdf548d5f --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-captured-object-no-dummy-use.js @@ -0,0 +1,18 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+// +// Flags: --allow-natives-syntax + +var global = "10.1"; +function f() { } +function g(a) { this.d = a; } +function h() { + var x = new f(); + global.dummy = this; + var y = new g(x); +} +h(); +h(); +%OptimizeFunctionOnNextCall(h); +h(); diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-357052.js b/deps/v8/test/mjsunit/regress/regress-crbug-357052.js new file mode 100644 index 000000000..9cde1b66c --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-crbug-357052.js @@ -0,0 +1,11 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +function f() { + var str = ""; + for (var i = 0; i < 30; i++) { + str += "abcdefgh12345678" + str; + } +} +assertThrows(f); diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-357137.js b/deps/v8/test/mjsunit/regress/regress-crbug-357137.js new file mode 100644 index 000000000..a780426f0 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-crbug-357137.js @@ -0,0 +1,8 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +var locals = ""; +for (var i = 0; i < 1024; i++) locals += "var v" + i + ";"; +eval("function f() {" + locals + "f();}"); +assertThrows("f()", RangeError); diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-357330.js b/deps/v8/test/mjsunit/regress/regress-crbug-357330.js new file mode 100644 index 000000000..b3edf0084 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-crbug-357330.js @@ -0,0 +1,16 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function f(foo) { + var g; + true ? (g = foo + 0) : g = null; + if (null != g) {} +}; + +f(1.4); +f(1.4); +%OptimizeFunctionOnNextCall(f); +f(1.4); diff --git a/deps/v8/test/mjsunit/regress/regress-create-exception.js b/deps/v8/test/mjsunit/regress/regress-create-exception.js index d3face9f3..e0553041a 100644 --- a/deps/v8/test/mjsunit/regress/regress-create-exception.js +++ b/deps/v8/test/mjsunit/regress/regress-create-exception.js @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --max-new-space-size=256 +// Flags: --max-new-space-size=2 "use strict"; // Check for GC bug constructing exceptions. diff --git a/deps/v8/test/mjsunit/regress/regress-empty-fixed-double-array.js b/deps/v8/test/mjsunit/regress/regress-empty-fixed-double-array.js new file mode 100644 index 000000000..1db9e2b3e --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-empty-fixed-double-array.js @@ -0,0 +1,15 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +// Flags: --allow-natives-syntax --enable-slow-asserts + +function f(a, x) { + a.shift(); + a[0] = x; +} + +f([1], 1.1); +f([1], 1.1); +%OptimizeFunctionOnNextCall(f); +f([1], 1.1); diff --git a/deps/v8/test/mjsunit/regress/regress-enum-prop-keys-cache-size.js b/deps/v8/test/mjsunit/regress/regress-enum-prop-keys-cache-size.js new file mode 100644 index 000000000..1227500ee --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-enum-prop-keys-cache-size.js @@ -0,0 +1,19 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --stress-compaction + +%SetAllocationTimeout(100000, 100000); + +var x = {}; +x.a = 1; +x.b = 2; +x = {}; + +var y = {}; +y.a = 1; + +%SetAllocationTimeout(100000, 0); + +for (var z in y) { } diff --git a/deps/v8/test/mjsunit/regress/regress-escape-preserve-smi-representation.js b/deps/v8/test/mjsunit/regress/regress-escape-preserve-smi-representation.js new file mode 100644 index 000000000..551147ed5 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-escape-preserve-smi-representation.js @@ -0,0 +1,35 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function deepEquals(a, b) { + if (a === b) { if (a === 0) return (1 / a) === (1 / b); return true; } + if (typeof a != typeof b) return false; + if (typeof a == "number") return isNaN(a) && isNaN(b); + if (typeof a !== "object" && typeof a !== "function") return false; + if (objectClass === "RegExp") { return (a.toString() === b.toString()); } + if (objectClass === "Function") return false; + if (objectClass === "Array") { + var elementCount = 0; + if (a.length != b.length) { return false; } + for (var i = 0; i < a.length; i++) { + if (!deepEquals(a[i], b[i])) return false; + } + return true; + } +} + + +function __f_1(){ + var __v_0 = []; + for(var i=0; i<2; i++){ + var __v_1=[]; + __v_0.push([]) + deepEquals(2, __v_0.length); + } +} +__f_1(); +%OptimizeFunctionOnNextCall(__f_1); +__f_1(); diff --git a/deps/v8/test/mjsunit/regress/regress-global-freeze-const.js b/deps/v8/test/mjsunit/regress/regress-global-freeze-const.js new file mode 100644 index 000000000..0b9e1f3eb --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-global-freeze-const.js @@ -0,0 +1,7 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +__defineSetter__('x', function() { }); +Object.freeze(this); +eval('const x = 1'); diff --git a/deps/v8/test/mjsunit/regress/regress-handle-illegal-redeclaration.js b/deps/v8/test/mjsunit/regress/regress-handle-illegal-redeclaration.js new file mode 100644 index 000000000..fe04ddb27 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-handle-illegal-redeclaration.js @@ -0,0 +1,15 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +// Flags: --always-opt + +var x = 0; + +function f() { + const c; + var c; + return 0 + x; +} + +assertThrows(f); diff --git a/deps/v8/test/mjsunit/regress/regress-inline-getter-near-stack-limit.js b/deps/v8/test/mjsunit/regress/regress-inline-getter-near-stack-limit.js new file mode 100644 index 000000000..d459a7a8d --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-inline-getter-near-stack-limit.js @@ -0,0 +1,24 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// Flags: --allow-natives-syntax + +function runNearStackLimit(f) { + function t() { + try { t(); } catch(e) { f(); } + }; + try { t(); } catch(e) {} +} + +function g(x) { return x.bar; } +function f1() { } +function f2() { } + +var x = Object.defineProperty({}, "bar", { get: f1 }); +g(x); +g(x); +var y = Object.defineProperty({}, "bar", { get: f2 }); +g(y); +%OptimizeFunctionOnNextCall(g); +runNearStackLimit(function() { g(y); }); diff --git a/deps/v8/test/mjsunit/regress/regress-lazy-deopt-inlining.js b/deps/v8/test/mjsunit/regress/regress-lazy-deopt-inlining.js new file mode 100644 index 000000000..6cda168df --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-lazy-deopt-inlining.js @@ -0,0 +1,24 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +"use strict"; +function f1(d) { + return 1 + f2(f3(d)); +} + +function f2(v) { return v; } + +function f3(d) { + if (d) %DeoptimizeFunction(f1); + return 2; +} + +%NeverOptimizeFunction(f3); + +f1(false); +f1(false); +%OptimizeFunctionOnNextCall(f1); +assertEquals(3, f1(true)); diff --git a/deps/v8/test/mjsunit/regress/regress-lazy-deopt-inlining2.js b/deps/v8/test/mjsunit/regress/regress-lazy-deopt-inlining2.js new file mode 100644 index 000000000..7b73b1423 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-lazy-deopt-inlining2.js @@ -0,0 +1,24 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +"use strict"; +function f1(d) { + return 1 + f2(1, f3(d), d); +} + +function f2(v0, v1, v2) { return v1; } + +function f3(d) { + if (d) %DeoptimizeFunction(f1); + return 2; +} + +%NeverOptimizeFunction(f3); + +f1(false); +f1(false); +%OptimizeFunctionOnNextCall(f1); +assertEquals(3, f1(true)); diff --git a/deps/v8/test/mjsunit/regress/regress-load-field-by-index.js b/deps/v8/test/mjsunit/regress/regress-load-field-by-index.js new file mode 100644 index 000000000..c572c1ee3 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-load-field-by-index.js @@ -0,0 +1,22 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +// Flags: --allow-natives-syntax + +var o = {a:1.5, b:{}}; + +function f(o) { + var result = []; + for (var k in o) { + result[result.length] = o[k]; + } + return result; +} + +f(o); +f(o); +%OptimizeFunctionOnNextCall(f); +var array = f(o); +o.a = 1.7; +assertEquals(1.5, array[0]); diff --git a/deps/v8/test/mjsunit/regress/regress-no-dummy-use-for-arguments-object.js b/deps/v8/test/mjsunit/regress/regress-no-dummy-use-for-arguments-object.js new file mode 100644 index 000000000..658d776ea --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-no-dummy-use-for-arguments-object.js @@ -0,0 +1,21 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function g() { + arguments.length; +} + +var global = ""; + +function f() { + global.dummy = this; + g({}); +} + +f(); +f(); +%OptimizeFunctionOnNextCall(f); +f(); diff --git a/deps/v8/test/mjsunit/regress/regress-observe-map-cache.js b/deps/v8/test/mjsunit/regress/regress-observe-map-cache.js new file mode 100644 index 000000000..4c7a7e3e9 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-observe-map-cache.js @@ -0,0 +1,14 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --enable-slow-asserts + +function f() { + var x = new Array(0); + x[-1] = -1; + Object.observe(x, function() { }); +} + +f(); +f(); diff --git a/deps/v8/test/mjsunit/regress/regress-parseint.js b/deps/v8/test/mjsunit/regress/regress-parseint.js new file mode 100644 index 000000000..05501f31f --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-parseint.js @@ -0,0 +1,18 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function f(string, radix) { + // Use a phi to force radix into heap number representation. + radix = (radix == 0) ? radix : (radix >> 0); + if (radix != 2) return NaN; + return %StringParseInt(string, radix); +} + +assertEquals(2, (-4294967294) >> 0); +assertEquals(3, f("11", -4294967294)); +assertEquals(NaN, f("11", -2147483650)); +%OptimizeFunctionOnNextCall(f); +assertEquals(3, f("11", -4294967294)); diff --git a/deps/v8/test/mozilla/mozilla.status b/deps/v8/test/mozilla/mozilla.status index 04fc6a98a..c04412ae9 100644 --- a/deps/v8/test/mozilla/mozilla.status +++ b/deps/v8/test/mozilla/mozilla.status @@ -141,8 +141,8 @@ 'ecma/Date/15.9.5.28-1': [PASS, FAIL], # 1050186: Arm/MIPS vm is broken; probably unrelated to dates - 'ecma/Array/15.4.4.5-3': [PASS, ['arch == arm or arch == mipsel', FAIL]], - 'ecma/Date/15.9.5.22-2': [PASS, ['arch == arm or arch == mipsel', FAIL]], + 'ecma/Array/15.4.4.5-3': [PASS, ['arch == arm or arch == mipsel or arch == mips', FAIL]], + 'ecma/Date/15.9.5.22-2': [PASS, ['arch == arm or arch == mipsel or arch == mips', FAIL]], # Flaky test that fails due to what appears to be a bug in the test. # Occurs depending on current time @@ -874,6 +874,25 @@ 'js1_5/GC/regress-203278-2': [PASS, TIMEOUT, NO_VARIANTS], }], # 'arch == mipsel' +['arch == mips', { + + # BUG(3251229): Times out when running new crankshaft test script. 
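regress-load-field-by-index only passes if reading o[k] during for-in yields a copy of the unboxed double field rather than something aliased to the object's storage. The value-semantics half of that, with no optimization flags involved (o and result follow the test's shape):

var o = { a: 1.5, b: {} };

var result = [];
for (var k in o) {
  result[result.length] = o[k];   // must snapshot the current value
}

o.a = 1.7;                        // later mutation must not leak into result
console.log(result[0]);           // 1.5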
+ 'ecma_3/RegExp/regress-311414': [SKIP], + 'ecma/Date/15.9.5.8': [SKIP], + 'ecma/Date/15.9.5.10-2': [SKIP], + 'ecma/Date/15.9.5.11-2': [SKIP], + 'ecma/Date/15.9.5.12-2': [SKIP], + 'js1_5/Array/regress-99120-02': [SKIP], + 'js1_5/extensions/regress-371636': [SKIP], + 'js1_5/Regress/regress-203278-1': [SKIP], + 'js1_5/Regress/regress-404755': [SKIP], + 'js1_5/Regress/regress-451322': [SKIP], + + + # BUG(1040): Allow this test to timeout. + 'js1_5/GC/regress-203278-2': [PASS, TIMEOUT, NO_VARIANTS], +}], # 'arch == mips' + ['arch == arm64 and simulator_run == True', { 'js1_5/GC/regress-203278-2': [SKIP], diff --git a/deps/v8/test/promises-aplus/promises-aplus.status b/deps/v8/test/promises-aplus/promises-aplus.status index c68eae96e..fdcf40b13 100644 --- a/deps/v8/test/promises-aplus/promises-aplus.status +++ b/deps/v8/test/promises-aplus/promises-aplus.status @@ -28,7 +28,5 @@ [ [ALWAYS, { - # http://crbug.com/347455 - '2.2.7': FAIL }], # ALWAYS ] diff --git a/deps/v8/test/promises-aplus/testcfg.py b/deps/v8/test/promises-aplus/testcfg.py index 99495e6fe..a5995a361 100644 --- a/deps/v8/test/promises-aplus/testcfg.py +++ b/deps/v8/test/promises-aplus/testcfg.py @@ -85,7 +85,6 @@ class PromiseAplusTestSuite(testsuite.TestSuite): def GetSourceForTest(self, testcase): filename = os.path.join(self.root, TEST_NAME, 'lib', 'tests', testcase.path + '.js') - return 'print("FAIL: fail");' with open(filename) as f: return f.read() diff --git a/deps/v8/test/test262/test262.status b/deps/v8/test/test262/test262.status index b844bdca5..247bd5cb6 100644 --- a/deps/v8/test/test262/test262.status +++ b/deps/v8/test/test262/test262.status @@ -99,7 +99,7 @@ 'S15.1.3.2_A2.5_T1': [PASS, ['mode == debug', SKIP]], }], # ALWAYS -['arch == arm or arch == mipsel or arch == arm64', { +['arch == arm or arch == mipsel or arch == mips or arch == arm64', { # TODO(mstarzinger): Causes stack overflow on simulators due to eager # compilation of parenthesized function literals. Needs investigation. @@ -112,5 +112,5 @@ 'S15.1.3.2_A2.5_T1': [SKIP], 'S15.1.3.3_A2.3_T1': [SKIP], 'S15.1.3.4_A2.3_T1': [SKIP], -}], # 'arch == arm or arch == mipsel or arch == arm64' +}], # 'arch == arm or arch == mipsel or arch == mips or arch == arm64' ] diff --git a/deps/v8/test/webkit/fast/js/arguments-expected.txt b/deps/v8/test/webkit/fast/js/arguments-expected.txt index ce1b383f5..f5bbb7293 100644 --- a/deps/v8/test/webkit/fast/js/arguments-expected.txt +++ b/deps/v8/test/webkit/fast/js/arguments-expected.txt @@ -157,7 +157,7 @@ PASS access_after_delete_extra_5(1, 2, 3, 4, 5) is 5 PASS argumentsParam(true) is true PASS argumentsFunctionConstructorParam(true) is true PASS argumentsVarUndefined() is '[object Arguments]' -FAIL argumentsConstUndefined() should be [object Arguments]. Threw exception TypeError: Variable 'arguments' has already been declared +FAIL argumentsConstUndefined() should be [object Arguments]. 
Threw exception TypeError: Identifier 'arguments' has already been declared PASS argumentCalleeInException() is argumentCalleeInException PASS shadowedArgumentsApply([true]) is true PASS shadowedArgumentsLength([]) is 0 diff --git a/deps/v8/test/webkit/fast/js/modify-non-references-expected.txt b/deps/v8/test/webkit/fast/js/modify-non-references-expected.txt index b8c692aa8..80f273086 100644 --- a/deps/v8/test/webkit/fast/js/modify-non-references-expected.txt +++ b/deps/v8/test/webkit/fast/js/modify-non-references-expected.txt @@ -21,18 +21,18 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -FAIL function f() { g()++; } f.toString() should be function f() { g()++; }. Threw exception ReferenceError: Invalid left-hand side expression in postfix operation -FAIL function f() { g()--; } f.toString() should be function f() { g()--; }. Threw exception ReferenceError: Invalid left-hand side expression in postfix operation -FAIL function f() { ++g(); } f.toString() should be function f() { ++g(); }. Threw exception ReferenceError: Invalid left-hand side expression in prefix operation -FAIL function f() { --g(); } f.toString() should be function f() { --g(); }. Threw exception ReferenceError: Invalid left-hand side expression in prefix operation -FAIL function f() { g() = 1; } f.toString() should be function f() { g() = 1; }. Threw exception ReferenceError: Invalid left-hand side in assignment -FAIL function f() { g() += 1; } f.toString() should be function f() { g() += 1; }. Threw exception ReferenceError: Invalid left-hand side in assignment -FAIL g()++ should throw ReferenceError: Postfix ++ operator applied to value that is not a reference.. Threw exception ReferenceError: Invalid left-hand side expression in postfix operation. -FAIL g()-- should throw ReferenceError: Postfix -- operator applied to value that is not a reference.. Threw exception ReferenceError: Invalid left-hand side expression in postfix operation. -FAIL ++g() should throw ReferenceError: Prefix ++ operator applied to value that is not a reference.. Threw exception ReferenceError: Invalid left-hand side expression in prefix operation. -FAIL --g() should throw ReferenceError: Prefix -- operator applied to value that is not a reference.. Threw exception ReferenceError: Invalid left-hand side expression in prefix operation. -FAIL g() = 1 should throw ReferenceError: Left side of assignment is not a reference.. Threw exception ReferenceError: Invalid left-hand side in assignment. -FAIL g() += 1 should throw ReferenceError: Left side of assignment is not a reference.. Threw exception ReferenceError: Invalid left-hand side in assignment. +PASS function f() { g()++; } f.toString() is 'function f() { g()++; }' +PASS function f() { g()--; } f.toString() is 'function f() { g()--; }' +PASS function f() { ++g(); } f.toString() is 'function f() { ++g(); }' +PASS function f() { --g(); } f.toString() is 'function f() { --g(); }' +PASS function f() { g() = 1; } f.toString() is 'function f() { g() = 1; }' +PASS function f() { g() += 1; } f.toString() is 'function f() { g() += 1; }' +PASS Number()++ threw exception ReferenceError: Invalid left-hand side expression in postfix operation. +PASS Number()-- threw exception ReferenceError: Invalid left-hand side expression in postfix operation. +PASS ++Number() threw exception ReferenceError: Invalid left-hand side expression in prefix operation. 
+PASS --Number() threw exception ReferenceError: Invalid left-hand side expression in prefix operation. +PASS Number() = 1 threw exception ReferenceError: Invalid left-hand side in assignment. +PASS Number() += 1 threw exception ReferenceError: Invalid left-hand side in assignment. PASS successfullyParsed is true TEST COMPLETE diff --git a/deps/v8/test/webkit/fast/js/modify-non-references.js b/deps/v8/test/webkit/fast/js/modify-non-references.js index e8b9682e7..bd9b7aa96 100644 --- a/deps/v8/test/webkit/fast/js/modify-non-references.js +++ b/deps/v8/test/webkit/fast/js/modify-non-references.js @@ -27,9 +27,9 @@ shouldBe("function f() { ++g(); } f.toString()", "'function f() { ++g(); }'"); shouldBe("function f() { --g(); } f.toString()", "'function f() { --g(); }'"); shouldBe("function f() { g() = 1; } f.toString()", "'function f() { g() = 1; }'"); shouldBe("function f() { g() += 1; } f.toString()", "'function f() { g() += 1; }'"); -shouldThrow("g()++", "'ReferenceError: Postfix ++ operator applied to value that is not a reference.'"); -shouldThrow("g()--", "'ReferenceError: Postfix -- operator applied to value that is not a reference.'"); -shouldThrow("++g()", "'ReferenceError: Prefix ++ operator applied to value that is not a reference.'"); -shouldThrow("--g()", "'ReferenceError: Prefix -- operator applied to value that is not a reference.'"); -shouldThrow("g() = 1", "'ReferenceError: Left side of assignment is not a reference.'"); -shouldThrow("g() += 1", "'ReferenceError: Left side of assignment is not a reference.'"); +shouldThrow("Number()++", "'ReferenceError: Invalid left-hand side expression in postfix operation'"); +shouldThrow("Number()--", "'ReferenceError: Invalid left-hand side expression in postfix operation'"); +shouldThrow("++Number()", "'ReferenceError: Invalid left-hand side expression in prefix operation'"); +shouldThrow("--Number()", "'ReferenceError: Invalid left-hand side expression in prefix operation'"); +shouldThrow("Number() = 1", "'ReferenceError: Invalid left-hand side in assignment'"); +shouldThrow("Number() += 1", "'ReferenceError: Invalid left-hand side in assignment'"); diff --git a/deps/v8/test/webkit/resources/JSON-stringify.js b/deps/v8/test/webkit/resources/JSON-stringify.js new file mode 100644 index 000000000..0c406f3bd --- /dev/null +++ b/deps/v8/test/webkit/resources/JSON-stringify.js @@ -0,0 +1,529 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Copyright (C) 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// +// THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY +// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. IN NO EVENT SHALL APPLE INC. 
OR ITS CONTRIBUTORS BE LIABLE FOR ANY +// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +function createTests() { + var simpleArray = ['a', 'b', 'c']; + var simpleObject = {a:"1", b:"2", c:"3"}; + var complexArray = ['a', 'b', 'c',,,simpleObject, simpleArray, [simpleObject,simpleArray]]; + var complexObject = {a:"1", b:"2", c:"3", d:undefined, e:null, "":12, get f(){ return simpleArray; }, array: complexArray}; + var simpleArrayWithProto = ['d', 'e', 'f']; + simpleArrayWithProto.__proto__ = simpleObject; + var simpleObjectWithProto = {d:"4", e:"5", f:"6", __proto__:simpleObject}; + var complexArrayWithProto = ['d', 'e', 'f',,,simpleObjectWithProto, simpleArrayWithProto, [simpleObjectWithProto,simpleArrayWithProto]]; + complexArrayWithProto.__proto__ = simpleObjectWithProto; + var complexObjectWithProto = {d:"4", e:"5", f:"6", g:undefined, h:null, "":12, get i(){ return simpleArrayWithProto; }, array2: complexArrayWithProto, __proto__:complexObject}; + var objectWithSideEffectGetter = {get b() {this.foo=1;}}; + var objectWithSideEffectGetterAndProto = {__proto__:{foo:"bar"}, get b() {this.foo=1;}}; + var arrayWithSideEffectGetter = []; + arrayWithSideEffectGetter.__defineGetter__("b", function(){this.foo=1;}); + var arrayWithSideEffectGetterAndProto = []; + arrayWithSideEffectGetterAndProto.__defineGetter__("b", function(){this.foo=1;}); + arrayWithSideEffectGetterAndProto.__proto__ = {foo:"bar"}; + var result = []; + result.push(function(jsonObject){ + return jsonObject.stringify(1); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(1.5); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(-1); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(-1.5); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(null); + }); + result.push(function(jsonObject){ + return jsonObject.stringify("string"); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(new Number(0)); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(new Number(1)); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(new Number(1.5)); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(new Number(-1)); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(new Number(-1.5)); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(new String("a string object")); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(new Boolean(true)); + }); + result.push(function(jsonObject){ + var value = new Number(1); + value.valueOf = function() { return 2; } + return jsonObject.stringify(value); + }); + result[result.length - 1].expected = '2'; + result.push(function(jsonObject){ + var value = new Boolean(true); + value.valueOf = function() { return 2; } + return jsonObject.stringify(value); + }); + result[result.length - 1].expected = '2'; + result.push(function(jsonObject){ + var value = new String("fail"); + value.toString = function() { return "converted string"; } + return jsonObject.stringify(value); + }); + 
result[result.length - 1].expected = '"converted string"'; + result.push(function(jsonObject){ + return jsonObject.stringify(true); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(false); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(new Date(0)); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({toJSON: Date.prototype.toJSON}); + }); + result[result.length - 1].throws = true; + result.push(function(jsonObject){ + return jsonObject.stringify({toJSON: Date.prototype.toJSON, toISOString: function(){ return "custom toISOString"; }}); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({toJSON: Date.prototype.toJSON, toISOString: function(){ return {}; }}); + }); + result[result.length - 1].throws = true; + result.push(function(jsonObject){ + return jsonObject.stringify({toJSON: Date.prototype.toJSON, toISOString: function(){ throw "An exception"; }}); + }); + result[result.length - 1].throws = true; + result.push(function(jsonObject){ + var d = new Date(0); + d.toISOString = null; + return jsonObject.stringify(d); + }); + result[result.length - 1].throws = true; + result.push(function(jsonObject){ + var d = new Date(0); + d.toJSON = undefined; + return jsonObject.stringify(d); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({get Foo() { return "bar"; }}); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({get Foo() { this.foo="wibble"; return "bar"; }}); + }); + result.push(function(jsonObject){ + var count = 0; + jsonObject.stringify({get Foo() { count++; return "bar"; }}); + return count; + }); + result.push(function(jsonObject){ + var count = 0; + return jsonObject.stringify({get Foo() { count++; delete this.bar; return "bar"; }, bar: "wibble"}); + }); + result.push(function(jsonObject){ + var count = 0; + return jsonObject.stringify({a:"1", b:"2", c:"3", 5:4, 4:5, 2:6, 1:7}); + }); + result.push(function(jsonObject){ + var allString = true; + jsonObject.stringify({a:"1", b:"2", c:"3", 5:4, 4:5, 2:6, 1:7}, function(k,v){allString = allString && (typeof k == "string"); return v}); + return allString; + }); + result.push(function(jsonObject){ + var allString = true; + jsonObject.stringify([1,2,3,4,5], function(k,v){allString = allString && (typeof k == "string"); return v}); + return allString; + }); + result.push(function(jsonObject){ + var allString = true; + var array = []; + return jsonObject.stringify({a:"1", b:"2", c:"3", 5:4, 4:5, 2:6, 1:7}, array); + }); + result.push(function(jsonObject){ + var allString = true; + var array = ["a"]; + return jsonObject.stringify({get a(){return 1;array[1]="b";array[2]="c"}, b:"2", c:"3"}, array); + }); + result.push(function(jsonObject){ + var allString = true; + var array = [{toString:function(){array[0]='a'; array[1]='c'; array[2]='b'; return 'a'}}]; + return jsonObject.stringify(simpleObject, array); + }); + result.push(function(jsonObject){ + var allString = true; + var array = [{toString:function(){array[0]='a'; array[1]='c'; array[2]='b'; return 'a'}}]; + return jsonObject.stringify(simpleObjectWithProto, array); + }); + result.push(function(jsonObject){ + var allString = true; + var array = [1, new Number(2), NaN, Infinity, -Infinity, new String("str")]; + return jsonObject.stringify({"1":"1","2":"2","NaN":"NaN","Infinity":"Infinity","-Infinity":"-Infinity","str":"str"}, array); + }); + result[result.length - 1].expected = 
'{"1":"1","2":"2","NaN":"NaN","Infinity":"Infinity","-Infinity":"-Infinity","str":"str"}'; + result.push(function(jsonObject){ + var allString = true; + var array = ["1","2","3"]; + return jsonObject.stringify({1:'a', 2:'b', 3:'c'}, array); + }); + result.push(function(jsonObject){ + var allString = true; + var array = ["1","2","3"]; + return jsonObject.stringify(simpleArray, array); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleArray, null, " "); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleArray, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleArray, null, "ab"); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleArray, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObject, null, " "); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObject, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObject, null, "ab"); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObject, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObject, null, 10); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObject, null, 11); + }); + result[result.length - 1].expected = JSON.stringify(simpleObject, null, 10); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObject, null, " "); + }); + result[result.length - 1].expected = JSON.stringify(simpleObject, null, 10); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObject, null, " "); + }); + result[result.length - 1].expected = JSON.stringify(simpleObject, null, 10); + result.push(function(jsonObject){ + return jsonObject.stringify(complexArray, null, " "); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexArray, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexArray, null, "ab"); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexArray, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexObject, null, " "); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexObject, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexObject, null, "ab"); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexObject, null, 4); + }); + result.push(function(jsonObject){ + var allString = true; + var array = ["1","2","3"]; + return jsonObject.stringify(simpleArrayWithProto, array); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleArrayWithProto, null, " "); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleArrayWithProto, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleArrayWithProto, null, "ab"); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleArrayWithProto, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, " "); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, "ab"); + }); + result.push(function(jsonObject){ + return 
jsonObject.stringify(simpleObjectWithProto, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, 10); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, 11); + }); + result[result.length - 1].expected = JSON.stringify(simpleObjectWithProto, null, 10); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, " "); + }); + result[result.length - 1].expected = JSON.stringify(simpleObjectWithProto, null, 10); + result.push(function(jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, " "); + }); + result[result.length - 1].expected = JSON.stringify(simpleObjectWithProto, null, 10); + result.push(function(jsonObject){ + return jsonObject.stringify(complexArrayWithProto, null, " "); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexArrayWithProto, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexArrayWithProto, null, "ab"); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexArrayWithProto, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexObjectWithProto, null, " "); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexObjectWithProto, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexObjectWithProto, null, "ab"); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(complexObjectWithProto, null, 4); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(objectWithSideEffectGetter); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(objectWithSideEffectGetterAndProto); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(arrayWithSideEffectGetter); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(arrayWithSideEffectGetterAndProto); + }); + var replaceTracker; + function replaceFunc(key, value) { + replaceTracker += key + "("+(typeof key)+")" + JSON.stringify(value) + ";"; + return value; + } + result.push(function(jsonObject){ + replaceTracker = ""; + jsonObject.stringify([1,2,3,,,,4,5,6], replaceFunc); + return replaceTracker; + }); + result[result.length - 1].expected = '(string)[1,2,3,null,null,null,4,5,6];0(number)1;1(number)2;2(number)3;3(number)undefined;4(number)undefined;5(number)undefined;6(number)4;7(number)5;8(number)6;' + result.push(function(jsonObject){ + replaceTracker = ""; + jsonObject.stringify({a:"a", b:"b", c:"c", 3: "d", 2: "e", 1: "f"}, replaceFunc); + return replaceTracker; + }); + result[result.length - 1].expected = '(string){"1":"f","2":"e","3":"d","a":"a","b":"b","c":"c"};1(string)"f";2(string)"e";3(string)"d";a(string)"a";b(string)"b";c(string)"c";'; + result.push(function(jsonObject){ + var count = 0; + var array = [{toString:function(){count++; array[0]='a'; array[1]='c'; array[2]='b'; return 'a'}}]; + jsonObject.stringify(simpleObject, array); + return count; + }); + result.push(function(jsonObject){ + var allString = true; + var array = [{toString:function(){array[0]='a'; array[1]='c'; array[2]='b'; return 'a'}}, 'b', 'c']; + return jsonObject.stringify(simpleObject, array); + }); + result.push(function(jsonObject){ + var count = 0; + var array = [{toString:function(){count++; array[0]='a'; array[1]='c'; array[2]='b'; return 'a'}}, 'b', 'c']; + jsonObject.stringify(simpleObject, array); + return count; + }); 
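// A brief standalone sketch (illustrative only, not part of this patch; the
// variable names are hypothetical) of the ES5 replacer-array behaviour the
// surrounding cases exercise: array entries are coerced to property names,
// and only those properties are written, in the order the array lists them.
var subject = {a: "1", b: "2", c: "3", 1: "one"};
// Only the listed keys survive, in replacer order: '{"c":"3","a":"1"}'
var filtered = JSON.stringify(subject, ["c", "a"]);
// Numbers (and Number objects) in the array are coerced to string keys,
// so this selects the property "1": '{"1":"one"}'
var numeric = JSON.stringify(subject, [1]);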
+ result.push(function(jsonObject){ + return jsonObject.stringify({a:"1", get b() { this.a="foo"; return "getter"; }, c:"3"}); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({a:"1", get b() { this.c="foo"; return "getter"; }, c:"3"}); + }); + result.push(function(jsonObject){ + var setterCalled = false; + jsonObject.stringify({a:"1", set b(s) { setterCalled = true; return "setter"; }, c:"3"}); + return setterCalled; + }); + result.push(function(jsonObject){ + return jsonObject.stringify({a:"1", get b(){ return "getter"; }, set b(s) { return "setter"; }, c:"3"}); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(new Array(10)); + }); + result.push(function(jsonObject){ + return jsonObject.stringify([undefined,,null,0,false]); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({p1:undefined,p2:null,p3:0,p4:false}); + }); + var cycleTracker = ""; + var cyclicObject = { get preSelf1() { cycleTracker+="preSelf1,"; return "preSelf1"; }, + preSelf2: {toJSON:function(){cycleTracker+="preSelf2,"; return "preSelf2"}}, + self: [], + get postSelf1() { cycleTracker+="postSelf1,"; return "postSelf1" }, + postSelf2: {toJSON:function(){cycleTracker+="postSelf2,"; return "postSelf2"}}, + toJSON : function(key) { cycleTracker += key + "("+(typeof key)+"):" + this; return this; } + }; + cyclicObject.self = cyclicObject; + result.push(function(jsonObject){ + cycleTracker = ""; + return jsonObject.stringify(cyclicObject); + }); + result[result.length - 1].throws = true; + result.push(function(jsonObject){ + cycleTracker = ""; + try { jsonObject.stringify(cyclicObject); } catch(e) { cycleTracker += " -> exception" } + return cycleTracker; + }); + result[result.length - 1].expected = "(string):[object Object]preSelf1,preSelf2,self(string):[object Object] -> exception" + var cyclicArray = [{toJSON : function(key,value) { cycleTracker += key + "("+(typeof key)+"):" + this; cycleTracker += "first,"; return this; }}, + cyclicArray, + {toJSON : function(key,value) { cycleTracker += key + "("+(typeof key)+"):" + this; cycleTracker += "second,"; return this; }}]; + cyclicArray[1] = cyclicArray; + result.push(function(jsonObject){ + cycleTracker = ""; + return jsonObject.stringify(cyclicArray); + }); + result[result.length - 1].throws = true; + result.push(function(jsonObject){ + cycleTracker = ""; + try { jsonObject.stringify(cyclicArray); } catch(e) { cycleTracker += " -> exception" } + return cycleTracker; + }); + result[result.length - 1].expected = "0(number):[object Object]first, -> exception"; + function createArray(len, o) { var r = []; for (var i = 0; i < len; i++) r[i] = o; return r; } + var getterCalls; + var magicObject = createArray(10, {abcdefg: [1,2,5,"ab", null, undefined, true, false,,], + get calls() {return ++getterCalls; }, + "123":createArray(15, "foo"), + "":{a:"b"}}); + result.push(function(jsonObject){ + getterCalls = 0; + return jsonObject.stringify(magicObject) + " :: getter calls = " + getterCalls; + }); + result.push(function(jsonObject){ + return jsonObject.stringify(undefined); + }); + result.push(function(jsonObject){ + return jsonObject.stringify(null); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({toJSON:function(){ return undefined; }}); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({toJSON:function(){ return null; }}); + }); + result.push(function(jsonObject){ + return jsonObject.stringify([{toJSON:function(){ return undefined; }}]); + }); + 
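// A small illustrative aside (not from the patch; names are hypothetical):
// per ES5, values with no JSON representation -- undefined, functions, or a
// toJSON that returns undefined -- are dropped from objects but serialized
// as null inside arrays, which is what the neighbouring cases verify.
var dropped = JSON.stringify({a: undefined, b: function () {}});   // '{}'
var nulled  = JSON.stringify([undefined, function () {}]);         // '[null,null]'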
result.push(function(jsonObject){ + return jsonObject.stringify([{toJSON:function(){ return null; }}]); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({a:{toJSON:function(){ return undefined; }}}); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({a:{toJSON:function(){ return null; }}}); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({a:{toJSON:function(){ return function(){}; }}}); + }); + result.push(function(jsonObject){ + return jsonObject.stringify({a:function(){}}); + }); + result.push(function(jsonObject){ + var deepObject = {}; + for (var i = 0; i < 1024; i++) + deepObject = {next:deepObject}; + return jsonObject.stringify(deepObject); + }); + result.push(function(jsonObject){ + var deepArray = []; + for (var i = 0; i < 1024; i++) + deepArray = [deepArray]; + return jsonObject.stringify(deepArray); + }); + result.push(function(jsonObject){ + var depth = 0; + function toDeepVirtualJSONObject() { + if (++depth >= 1024) + return {}; + var r = {}; + r.toJSON = toDeepVirtualJSONObject; + return {recurse: r}; + } + return jsonObject.stringify(toDeepVirtualJSONObject()); + }); + result.push(function(jsonObject){ + var depth = 0; + function toDeepVirtualJSONArray() { + if (++depth >= 1024) + return []; + var r = []; + r.toJSON = toDeepJSONArray; + return [r]; + } + return jsonObject.stringify(toDeepVirtualJSONArray()); + }); + var fullCharsetString = ""; + for (var i = 0; i < 65536; i++) + fullCharsetString += String.fromCharCode(i); + result.push(function(jsonObject){ + return jsonObject.stringify(fullCharsetString); + }); + return result; +} +var tests = createTests(); +for (var i = 0; i < tests.length; i++) { + try { + debug(tests[i]); + if (tests[i].throws) + shouldThrow('tests[i](nativeJSON)'); + else if (tests[i].expected) + shouldBe('tests[i](nativeJSON)', "tests[i].expected"); + else + shouldBe('tests[i](nativeJSON)', "tests[i](JSON)"); + }catch(e){} +} diff --git a/deps/v8/test/webkit/resources/json2-es5-compat.js b/deps/v8/test/webkit/resources/json2-es5-compat.js new file mode 100644 index 000000000..b71656f00 --- /dev/null +++ b/deps/v8/test/webkit/resources/json2-es5-compat.js @@ -0,0 +1,481 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +/* + http://www.JSON.org/json2.js + 2009-04-16 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. 
A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the object holding the key. + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. + + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. + + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. 
+ + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. +*/ + +/*jslint evil: true */ + +/*global JSON */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +if (!this.JSON) { + JSON = {}; +} +(function () { + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + if (typeof Date.prototype.toJSON !== 'function') { + + Date.prototype.toJSON = function (key) { + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? + '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' ? c : + '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : + '"' + string + '"'; + } + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key]; + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } + +// What happens next depends on the value's type. + + if (value && ((typeof value) === "object")) { + if (value.constructor === String || value.constructor === Number || value.constructor === Boolean) + value = value.valueOf(); + } + + switch (typeof value) { + case 'string': + return quote(value); + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. 
+ + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; + } + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 ? '[]' : + gap ? '[\n' + gap + + partial.join(',\n' + gap) + '\n' + + mind + ']' : + '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. + + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + k = rep[i]; + if (typeof k === 'string') { + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { + +// Otherwise, iterate through all of the keys in the object. + + for (k in value) { + if (Object.hasOwnProperty.call(value, k)) { + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 ? '{}' : + gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + + mind + '}' : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. + + return str('', {'': value}); + }; + } + + +// If the JSON object does not yet have a parse method, give it one. + + if (typeof JSON.parse !== 'function') { + JSON.parse = function (text, reviver) { + +// The parse method takes a text and an optional reviver function, and returns +// a JavaScript value if the text is a valid JSON text. + + var j; + + function walk(holder, key) { + +// The walk method is used to recursively walk the resulting structure so +// that modifications can be made. 
+ + var k, v, value = holder[key]; + if (value && typeof value === 'object') { + for (k in value) { + if (Object.hasOwnProperty.call(value, k)) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + } + } + } + return reviver.call(holder, key, value); + } + + +// Parsing happens in four stages. In the first stage, we replace certain +// Unicode characters with escape sequences. JavaScript handles many characters +// incorrectly, either silently deleting them, or treating them as line endings. + + cx.lastIndex = 0; + if (cx.test(text)) { + text = text.replace(cx, function (a) { + return '\\u' + + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }); + } + +// In the second stage, we run the text against regular expressions that look +// for non-JSON patterns. We are especially concerned with '()' and 'new' +// because they can cause invocation, and '=' because it can cause mutation. +// But just to be safe, we want to reject all unexpected forms. + +// We split the second stage into 4 regexp operations in order to work around +// crippling inefficiencies in IE's and Safari's regexp engines. First we +// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we +// replace all simple value tokens with ']' characters. Third, we delete all +// open brackets that follow a colon or comma or that begin the text. Finally, +// we look to see that the remaining characters are only whitespace or ']' or +// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval. + + if (/^[\],:{}\s]*$/. +test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@'). +replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']'). +replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) { + +// In the third stage we use the eval function to compile the text into a +// JavaScript structure. The '{' operator is subject to a syntactic ambiguity +// in JavaScript: it can begin a block or an object literal. We wrap the text +// in parens to eliminate the ambiguity. + + j = eval('(' + text + ')'); + +// In the optional fourth stage, we recursively walk the new structure, passing +// each name/value pair to a reviver function for possible transformation. + + return typeof reviver === 'function' ? + walk({'': j}, '') : j; + } + +// If the text is not JSON parseable, then a SyntaxError is thrown. 
+ + throw new SyntaxError('JSON.parse'); + }; + } +}()); diff --git a/deps/v8/test/webkit/run-json-stringify-expected.txt b/deps/v8/test/webkit/run-json-stringify-expected.txt new file mode 100644 index 000000000..45d55c7ca --- /dev/null +++ b/deps/v8/test/webkit/run-json-stringify-expected.txt @@ -0,0 +1,544 @@ +function (jsonObject){ + return jsonObject.stringify(1); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(1.5); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(-1); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(-1.5); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(null); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify("string"); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(new Number(0)); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(new Number(1)); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(new Number(1.5)); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(new Number(-1)); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(new Number(-1.5)); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(new String("a string object")); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(new Boolean(true)); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var value = new Number(1); + value.valueOf = function() { return 2; } + return jsonObject.stringify(value); + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + var value = new Boolean(true); + value.valueOf = function() { return 2; } + return jsonObject.stringify(value); + } +FAIL tests[i](nativeJSON) should be 2. Was true. +function (jsonObject){ + var value = new String("fail"); + value.toString = function() { return "converted string"; } + return jsonObject.stringify(value); + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + return jsonObject.stringify(true); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(false); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(new Date(0)); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({toJSON: Date.prototype.toJSON}); + } +PASS tests[i](nativeJSON) threw exception TypeError: undefined is not a function. +function (jsonObject){ + return jsonObject.stringify({toJSON: Date.prototype.toJSON, toISOString: function(){ return "custom toISOString"; }}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({toJSON: Date.prototype.toJSON, toISOString: function(){ return {}; }}); + } +FAIL tests[i](nativeJSON) should throw an exception. Was {}. +function (jsonObject){ + return jsonObject.stringify({toJSON: Date.prototype.toJSON, toISOString: function(){ throw "An exception"; }}); + } +PASS tests[i](nativeJSON) threw exception An exception. 
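// Illustrative sketch (not part of the recorded expectations; `d` is just a
// local name): ES5's Date.prototype.toJSON delegates to this.toISOString, so
// the next recorded case, which nulls out toISOString, has to throw.
var d = new Date(0);
var iso = JSON.stringify(d);   // an ISO-8601 string such as '"1970-01-01T00:00:00.000Z"'
d.toISOString = null;
try { JSON.stringify(d); } catch (e) { /* TypeError: toISOString is not callable */ }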
+function (jsonObject){ + var d = new Date(0); + d.toISOString = null; + return jsonObject.stringify(d); + } +PASS tests[i](nativeJSON) threw exception TypeError: object is not a function. +function (jsonObject){ + var d = new Date(0); + d.toJSON = undefined; + return jsonObject.stringify(d); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({get Foo() { return "bar"; }}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({get Foo() { this.foo="wibble"; return "bar"; }}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var count = 0; + jsonObject.stringify({get Foo() { count++; return "bar"; }}); + return count; + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var count = 0; + return jsonObject.stringify({get Foo() { count++; delete this.bar; return "bar"; }, bar: "wibble"}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var count = 0; + return jsonObject.stringify({a:"1", b:"2", c:"3", 5:4, 4:5, 2:6, 1:7}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var allString = true; + jsonObject.stringify({a:"1", b:"2", c:"3", 5:4, 4:5, 2:6, 1:7}, function(k,v){allString = allString && (typeof k == "string"); return v}); + return allString; + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var allString = true; + jsonObject.stringify([1,2,3,4,5], function(k,v){allString = allString && (typeof k == "string"); return v}); + return allString; + } +FAIL tests[i](nativeJSON) should be false. Was true. +function (jsonObject){ + var allString = true; + var array = []; + return jsonObject.stringify({a:"1", b:"2", c:"3", 5:4, 4:5, 2:6, 1:7}, array); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var allString = true; + var array = ["a"]; + return jsonObject.stringify({get a(){return 1;array[1]="b";array[2]="c"}, b:"2", c:"3"}, array); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var allString = true; + var array = [{toString:function(){array[0]='a'; array[1]='c'; array[2]='b'; return 'a'}}]; + return jsonObject.stringify(simpleObject, array); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var allString = true; + var array = [{toString:function(){array[0]='a'; array[1]='c'; array[2]='b'; return 'a'}}]; + return jsonObject.stringify(simpleObjectWithProto, array); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var allString = true; + var array = [1, new Number(2), NaN, Infinity, -Infinity, new String("str")]; + return jsonObject.stringify({"1":"1","2":"2","NaN":"NaN","Infinity":"Infinity","-Infinity":"-Infinity","str":"str"}, array); + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + var allString = true; + var array = ["1","2","3"]; + return jsonObject.stringify({1:'a', 2:'b', 3:'c'}, array); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var allString = true; + var array = ["1","2","3"]; + return jsonObject.stringify(simpleArray, array); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleArray, null, " "); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleArray, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleArray, 
null, "ab"); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleArray, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObject, null, " "); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObject, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObject, null, "ab"); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObject, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObject, null, 10); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObject, null, 11); + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + return jsonObject.stringify(simpleObject, null, " "); + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + return jsonObject.stringify(simpleObject, null, " "); + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + return jsonObject.stringify(complexArray, null, " "); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexArray, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexArray, null, "ab"); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexArray, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexObject, null, " "); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexObject, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexObject, null, "ab"); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexObject, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var allString = true; + var array = ["1","2","3"]; + return jsonObject.stringify(simpleArrayWithProto, array); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleArrayWithProto, null, " "); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleArrayWithProto, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleArrayWithProto, null, "ab"); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleArrayWithProto, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, " "); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, "ab"); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function 
(jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, 10); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, 11); + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, " "); + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + return jsonObject.stringify(simpleObjectWithProto, null, " "); + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + return jsonObject.stringify(complexArrayWithProto, null, " "); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexArrayWithProto, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexArrayWithProto, null, "ab"); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexArrayWithProto, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexObjectWithProto, null, " "); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexObjectWithProto, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexObjectWithProto, null, "ab"); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(complexObjectWithProto, null, 4); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(objectWithSideEffectGetter); + } +FAIL tests[i](nativeJSON) should be {"foo":1}. Was {}. +function (jsonObject){ + return jsonObject.stringify(objectWithSideEffectGetterAndProto); + } +FAIL tests[i](nativeJSON) should be {"foo":1}. Was {}. +function (jsonObject){ + return jsonObject.stringify(arrayWithSideEffectGetter); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(arrayWithSideEffectGetterAndProto); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + replaceTracker = ""; + jsonObject.stringify([1,2,3,,,,4,5,6], replaceFunc); + return replaceTracker; + } +FAIL tests[i](nativeJSON) should be (string)[1,2,3,null,null,null,4,5,6];0(number)1;1(number)2;2(number)3;3(number)undefined;4(number)undefined;5(number)undefined;6(number)4;7(number)5;8(number)6;. Was (string)[1,2,3,null,null,null,4,5,6];0(string)1;1(string)2;2(string)3;3(string)undefined;4(string)undefined;5(string)undefined;6(string)4;7(string)5;8(string)6;. 
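// Illustrative note (not part of the recorded output; `keyTypes` is a made-up
// name): the FAIL above reflects a spec difference rather than a broken
// engine -- the WebKit-era expectation records numeric keys for array
// indices, while ES5 (and V8 here) hand the replacer every key as a string,
// including the root key "". For example:
var keyTypes = [];
JSON.stringify([10, 20], function (k, v) { keyTypes.push(typeof k); return v; });
// keyTypes is now ["string", "string", "string"]: the root "" plus "0" and "1".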
+function (jsonObject){ + replaceTracker = ""; + jsonObject.stringify({a:"a", b:"b", c:"c", 3: "d", 2: "e", 1: "f"}, replaceFunc); + return replaceTracker; + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + var count = 0; + var array = [{toString:function(){count++; array[0]='a'; array[1]='c'; array[2]='b'; return 'a'}}]; + jsonObject.stringify(simpleObject, array); + return count; + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var allString = true; + var array = [{toString:function(){array[0]='a'; array[1]='c'; array[2]='b'; return 'a'}}, 'b', 'c']; + return jsonObject.stringify(simpleObject, array); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var count = 0; + var array = [{toString:function(){count++; array[0]='a'; array[1]='c'; array[2]='b'; return 'a'}}, 'b', 'c']; + jsonObject.stringify(simpleObject, array); + return count; + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({a:"1", get b() { this.a="foo"; return "getter"; }, c:"3"}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({a:"1", get b() { this.c="foo"; return "getter"; }, c:"3"}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var setterCalled = false; + jsonObject.stringify({a:"1", set b(s) { setterCalled = true; return "setter"; }, c:"3"}); + return setterCalled; + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({a:"1", get b(){ return "getter"; }, set b(s) { return "setter"; }, c:"3"}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(new Array(10)); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify([undefined,,null,0,false]); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({p1:undefined,p2:null,p3:0,p4:false}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + cycleTracker = ""; + return jsonObject.stringify(cyclicObject); + } +PASS tests[i](nativeJSON) threw exception TypeError: Converting circular structure to JSON. +function (jsonObject){ + cycleTracker = ""; + try { jsonObject.stringify(cyclicObject); } catch(e) { cycleTracker += " -> exception" } + return cycleTracker; + } +PASS tests[i](nativeJSON) is tests[i].expected +function (jsonObject){ + cycleTracker = ""; + return jsonObject.stringify(cyclicArray); + } +PASS tests[i](nativeJSON) threw exception TypeError: Converting circular structure to JSON. +function (jsonObject){ + cycleTracker = ""; + try { jsonObject.stringify(cyclicArray); } catch(e) { cycleTracker += " -> exception" } + return cycleTracker; + } +FAIL tests[i](nativeJSON) should be 0(number):[object Object]first, -> exception. Was 0(string):[object Object]first, -> exception. 
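// Illustrative sketch (not from the recorded output; `node` is a made-up
// name): the cyclic cases above depend on JSON.stringify detecting cycles and
// throwing a TypeError, matching the "Converting circular structure to JSON"
// expectation recorded earlier.
var node = {};
node.self = node;            // the object now references itself
try {
  JSON.stringify(node);      // native V8 throws a TypeError here
} catch (e) {
  // cycle detected; the tests append " -> exception" at this point
}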
+function (jsonObject){ + getterCalls = 0; + return jsonObject.stringify(magicObject) + " :: getter calls = " + getterCalls; + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(undefined); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify(null); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({toJSON:function(){ return undefined; }}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({toJSON:function(){ return null; }}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify([{toJSON:function(){ return undefined; }}]); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify([{toJSON:function(){ return null; }}]); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({a:{toJSON:function(){ return undefined; }}}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({a:{toJSON:function(){ return null; }}}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({a:{toJSON:function(){ return function(){}; }}}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + return jsonObject.stringify({a:function(){}}); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var deepObject = {}; + for (var i = 0; i < 1024; i++) + deepObject = {next:deepObject}; + return jsonObject.stringify(deepObject); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var deepArray = []; + for (var i = 0; i < 1024; i++) + deepArray = [deepArray]; + return jsonObject.stringify(deepArray); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var depth = 0; + function toDeepVirtualJSONObject() { + if (++depth >= 1024) + return {}; + var r = {}; + r.toJSON = toDeepVirtualJSONObject; + return {recurse: r}; + } + return jsonObject.stringify(toDeepVirtualJSONObject()); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +function (jsonObject){ + var depth = 0; + function toDeepVirtualJSONArray() { + if (++depth >= 1024) + return []; + var r = []; + r.toJSON = toDeepJSONArray; + return [r]; + } + return jsonObject.stringify(toDeepVirtualJSONArray()); + } +function (jsonObject){ + return jsonObject.stringify(fullCharsetString); + } +PASS tests[i](nativeJSON) is tests[i](JSON) +PASS successfullyParsed is true + +TEST COMPLETE + diff --git a/deps/v8/test/webkit/run-json-stringify.js b/deps/v8/test/webkit/run-json-stringify.js new file mode 100644 index 000000000..b2fbdeb5e --- /dev/null +++ b/deps/v8/test/webkit/run-json-stringify.js @@ -0,0 +1,8 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +var nativeJSON = this.JSON; +this.JSON = null; +load("test/webkit/resources/json2-es5-compat.js"); +load("test/webkit/resources/JSON-stringify.js"); diff --git a/deps/v8/test/webkit/webkit.status b/deps/v8/test/webkit/webkit.status index 8ae5e3dfc..a6bf845d0 100644 --- a/deps/v8/test/webkit/webkit.status +++ b/deps/v8/test/webkit/webkit.status @@ -30,9 +30,6 @@ # BUG(237872). TODO(bmeurer): Investigate. 
'string-replacement-outofmemory': [FAIL], - # TODO(rossberg): Awaiting spec resolution (https://bugs.ecmascript.org/show_bug.cgi?id=2566) - 'fast/js/Promise-then': [FAIL], - ############################################################################## # Flaky tests. # BUG(v8:2989). diff --git a/deps/v8/tools/blink_tests/TestExpectations b/deps/v8/tools/blink_tests/TestExpectations index 530f85347..728906f43 100644 --- a/deps/v8/tools/blink_tests/TestExpectations +++ b/deps/v8/tools/blink_tests/TestExpectations @@ -1,5 +1,4 @@ # Tests that sometimes fail only on the V8 waterfall: -[ Linux Release x86 ] fast/js/JSON-stringify.html [ Pass Failure Slow ] [ Linux Release x86 ] fast/text/atsui-multiple-renderers.html [ Pass Failure Slow ] [ Linux Release x86 ] fast/text/international/complex-joining-using-gpos.html [ Pass Failure Slow ] [ Linux Release x86 ] fast/text/international/danda-space.html [ Pass Failure Slow ] diff --git a/deps/v8/tools/common-includes.sh b/deps/v8/tools/common-includes.sh deleted file mode 100644 index 7785e9fc3..000000000 --- a/deps/v8/tools/common-includes.sh +++ /dev/null @@ -1,198 +0,0 @@ -# Copyright 2012 the V8 project authors. All rights reserved. -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# This file contains common function definitions for various other shell -# scripts in this directory. It is not meant to be executed by itself. - -# Important: before including this file, the following variables must be set: -# - BRANCHNAME -# - PERSISTFILE_BASENAME - -TEMP_BRANCH=$BRANCHNAME-temporary-branch-created-by-script -VERSION_FILE="src/version.cc" -CHANGELOG_ENTRY_FILE="$PERSISTFILE_BASENAME-changelog-entry" -PATCH_FILE="$PERSISTFILE_BASENAME-patch" -COMMITMSG_FILE="$PERSISTFILE_BASENAME-commitmsg" -TRUNK_REVISION_FILE="$PERSISTFILE_BASENAME-trunkrevision" -START_STEP=0 -CURRENT_STEP=0 - -die() { - [[ -n "$1" ]] && echo "Error: $1" - echo "Exiting." 
- exit 1 -} - -confirm() { - echo -n "$1 [Y/n] " - read ANSWER - if [[ -z "$ANSWER" || "$ANSWER" == "Y" || "$ANSWER" == "y" ]] ; then - return 0 - else - return 1 - fi -} - -delete_branch() { - local MATCH=$(git branch | grep "$1" | awk '{print $NF}' | grep -x $1) - if [ "$MATCH" == "$1" ] ; then - confirm "Branch $1 exists, do you want to delete it?" - if [ $? -eq 0 ] ; then - git branch -D $1 || die "Deleting branch '$1' failed." - echo "Branch $1 deleted." - else - die "Can't continue. Please delete branch $1 and try again." - fi - fi -} - -# Persist and restore variables to support canceling/resuming execution -# of this script. -persist() { - local VARNAME=$1 - local FILE="$PERSISTFILE_BASENAME-$VARNAME" - local VALUE="${!VARNAME}" - if [ -z "$VALUE" ] ; then - VALUE="__EMPTY__" - fi - echo "$VALUE" > $FILE -} - -restore() { - local VARNAME=$1 - local FILE="$PERSISTFILE_BASENAME-$VARNAME" - local VALUE="$(cat $FILE)" - [[ -z "$VALUE" ]] && die "Variable '$VARNAME' could not be restored." - if [ "$VALUE" == "__EMPTY__" ] ; then - VALUE="" - fi - eval "$VARNAME=\"$VALUE\"" -} - -restore_if_unset() { - local VARNAME=$1 - [[ -z "${!VARNAME}" ]] && restore "$VARNAME" -} - -initial_environment_checks() { - # Cancel if this is not a git checkout. - [[ -d .git ]] \ - || die "This is not a git checkout, this script won't work for you." - - # Cancel if EDITOR is unset or not executable. - [[ -n "$EDITOR" && -x "$(which $EDITOR)" ]] \ - || die "Please set your EDITOR environment variable, you'll need it." -} - -common_prepare() { - # Check for a clean workdir. - [[ -z "$(git status -s -uno)" ]] \ - || die "Workspace is not clean. Please commit or undo your changes." - - # Persist current branch. - CURRENT_BRANCH=$(git status -s -b -uno | grep "^##" | awk '{print $2}') - persist "CURRENT_BRANCH" - - # Fetch unfetched revisions. - git svn fetch || die "'git svn fetch' failed." - - # Get ahold of a safe temporary branch and check it out. - if [ "$CURRENT_BRANCH" != "$TEMP_BRANCH" ] ; then - delete_branch $TEMP_BRANCH - git checkout -b $TEMP_BRANCH - fi - - # Delete the branch that will be created later if it exists already. - delete_branch $BRANCHNAME -} - -common_cleanup() { - restore_if_unset "CURRENT_BRANCH" - git checkout -f $CURRENT_BRANCH - [[ "$TEMP_BRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TEMP_BRANCH - [[ "$BRANCHNAME" != "$CURRENT_BRANCH" ]] && git branch -D $BRANCHNAME - # Clean up all temporary files. - rm -f "$PERSISTFILE_BASENAME"* -} - -# These two functions take a prefix for the variable names as first argument. -read_and_persist_version() { - for v in MAJOR_VERSION MINOR_VERSION BUILD_NUMBER PATCH_LEVEL; do - VARNAME="$1${v%%_*}" - VALUE=$(grep "#define $v" "$VERSION_FILE" | awk '{print $NF}') - eval "$VARNAME=\"$VALUE\"" - persist "$VARNAME" - done -} -restore_version_if_unset() { - for v in MAJOR MINOR BUILD PATCH; do - restore_if_unset "$1$v" - done -} - -upload_step() { - let CURRENT_STEP+=1 - if [ $START_STEP -le $CURRENT_STEP ] ; then - echo ">>> Step $CURRENT_STEP: Upload for code review." - echo -n "Please enter the email address of a V8 reviewer for your patch: " - read REVIEWER - git cl upload -r "$REVIEWER" --send-mail \ - || die "'git cl upload' failed, please try again." - fi -} - -wait_for_lgtm() { - echo "Please wait for an LGTM, then type \"LGTM<Return>\" to commit your \ -change. (If you need to iterate on the patch or double check that it's \ -sane, do so in another shell, but remember to not change the headline of \ -the uploaded CL." 
- unset ANSWER - while [ "$ANSWER" != "LGTM" ] ; do - [[ -n "$ANSWER" ]] && echo "That was not 'LGTM'." - echo -n "> " - read ANSWER - done -} - -wait_for_resolving_conflicts() { - echo "Applying the patch \"$1\" failed. Either type \"ABORT<Return>\", or \ -resolve the conflicts, stage *all* touched files with 'git add', and \ -type \"RESOLVED<Return>\"" - unset ANSWER - while [ "$ANSWER" != "RESOLVED" ] ; do - [[ "$ANSWER" == "ABORT" ]] && die "Applying the patch failed." - [[ -n "$ANSWER" ]] && echo "That was not 'RESOLVED' or 'ABORT'." - echo -n "> " - read ANSWER - done -} - -# Takes a file containing the patch to apply as first argument. -apply_patch() { - git apply --index --reject $REVERSE_PATCH "$1" || \ - wait_for_resolving_conflicts "$1"; -} diff --git a/deps/v8/tools/gcmole/gcmole.lua b/deps/v8/tools/gcmole/gcmole.lua index cd91a913d..f1980a459 100644 --- a/deps/v8/tools/gcmole/gcmole.lua +++ b/deps/v8/tools/gcmole/gcmole.lua @@ -116,7 +116,7 @@ function InvokeClangPluginForEachFile(filenames, cfg, func) cfg.arch_define) for _, filename in ipairs(filenames) do log("-- %s", filename) - local action = cmd_line .. " src/" .. filename .. " 2>&1" + local action = cmd_line .. " " .. filename .. " 2>&1" if FLAGS.verbose then print('popen ', action) end local pipe = io.popen(action) func(filename, pipe:lines()) @@ -129,19 +129,26 @@ end -- GYP file parsing local function ParseGYPFile() - local f = assert(io.open("tools/gyp/v8.gyp"), "failed to open GYP file") - local gyp = f:read('*a') - f:close() + local gyp = "" + local gyp_files = { "tools/gyp/v8.gyp", "test/cctest/cctest.gyp" } + for i = 1, #gyp_files do + local f = assert(io.open(gyp_files[i]), "failed to open GYP file") + local t = f:read('*a') + gyp = gyp .. t + f:close() + end local result = {} for condition, sources in gyp:gmatch "'sources': %[.-### gcmole%((.-)%) ###(.-)%]" do - local files = {} + if result[condition] == nil then result[condition] = {} end for file in sources:gmatch "'%.%./%.%./src/([^']-%.cc)'" do - table.insert(files, file) + table.insert(result[condition], "src/" .. file) + end + for file in sources:gmatch "'(test-[^']-%.cc)'" do + table.insert(result[condition], "test/cctest/" .. 
file) end - result[condition] = files end return result diff --git a/deps/v8/tools/gyp/v8.gyp b/deps/v8/tools/gyp/v8.gyp index f7bdf52b9..e6a5bd14e 100644 --- a/deps/v8/tools/gyp/v8.gyp +++ b/deps/v8/tools/gyp/v8.gyp @@ -237,6 +237,9 @@ { 'target_name': 'v8_base.<(v8_target_arch)', 'type': 'static_library', + 'dependencies': [ + 'v8_libbase.<(v8_target_arch)', + ], 'variables': { 'optimize': 'max', }, @@ -474,6 +477,7 @@ '../../src/mark-compact.h', '../../src/messages.cc', '../../src/messages.h', + '../../src/msan.h', '../../src/natives.h', '../../src/objects-debug.cc', '../../src/objects-inl.h', @@ -569,6 +573,7 @@ '../../src/transitions.h', '../../src/type-info.cc', '../../src/type-info.h', + '../../src/types-inl.h', '../../src/types.cc', '../../src/types.h', '../../src/typing.cc', @@ -585,21 +590,16 @@ '../../src/utils.h', '../../src/utils/random-number-generator.cc', '../../src/utils/random-number-generator.h', - '../../src/v8-counters.cc', - '../../src/v8-counters.h', '../../src/v8.cc', '../../src/v8.h', '../../src/v8checks.h', - '../../src/v8conversions.cc', - '../../src/v8conversions.h', '../../src/v8globals.h', '../../src/v8memory.h', '../../src/v8threads.cc', '../../src/v8threads.h', - '../../src/v8utils.cc', - '../../src/v8utils.h', '../../src/variables.cc', '../../src/variables.h', + '../../src/vector.h', '../../src/version.cc', '../../src/version.h', '../../src/vm-state-inl.h', @@ -694,7 +694,7 @@ '../../src/arm64/utils-arm64.h', ], }], - ['v8_target_arch=="ia32" or v8_target_arch=="mac" or OS=="mac"', { + ['v8_target_arch=="ia32"', { 'sources': [ ### gcmole(arch:ia32) ### '../../src/ia32/assembler-ia32-inl.h', '../../src/ia32/assembler-ia32.cc', @@ -725,7 +725,7 @@ '../../src/ia32/stub-cache-ia32.cc', ], }], - ['v8_target_arch=="mipsel"', { + ['v8_target_arch=="mips" or v8_target_arch=="mipsel"', { 'sources': [ ### gcmole(arch:mipsel) ### '../../src/mips/assembler-mips.cc', '../../src/mips/assembler-mips.h', @@ -759,7 +759,7 @@ '../../src/mips/stub-cache-mips.cc', ], }], - ['v8_target_arch=="x64" or v8_target_arch=="mac" or OS=="mac"', { + ['v8_target_arch=="x64"', { 'sources': [ ### gcmole(arch:x64) ### '../../src/x64/assembler-x64-inl.h', '../../src/x64/assembler-x64.cc', @@ -1015,12 +1015,6 @@ '<(icu_gyp_path):icudata', ], }], - ['v8_use_default_platform==0', { - 'sources!': [ - '../../src/default-platform.cc', - '../../src/default-platform.h', - ], - }], ['icu_use_data_file_flag==1', { 'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_FILE'], }, { # else icu_use_data_file_flag !=1 @@ -1035,6 +1029,33 @@ ], }, { + 'target_name': 'v8_libbase.<(v8_target_arch)', + # TODO(jochen): Should be a static library once it has sources in it. + 'type': 'none', + 'variables': { + 'optimize': 'max', + }, + 'include_dirs+': [ + '../../src', + ], + 'sources': [ + '../../src/base/macros.h', + ], + 'conditions': [ + ['want_separate_host_toolset==1', { + 'toolsets': ['host', 'target'], + }, { + 'toolsets': ['target'], + }], + ['component=="shared_library"', { + 'defines': [ + 'BUILDING_V8_SHARED', + 'V8_SHARED', + ], + }], + ], + }, + { 'target_name': 'js2c', 'type': 'none', 'conditions': [ diff --git a/deps/v8/tools/js2c.py b/deps/v8/tools/js2c.py index f67d053ad..171821092 100755 --- a/deps/v8/tools/js2c.py +++ b/deps/v8/tools/js2c.py @@ -32,24 +32,23 @@ # library. 
import os, re, sys, string +import optparse import jsmin import bz2 +import textwrap -def ToCAsciiArray(lines): - result = [] - for chr in lines: - value = ord(chr) - assert value < 128 - result.append(str(value)) - return ", ".join(result) +class Error(Exception): + def __init__(self, msg): + Exception.__init__(self, msg) -def ToCArray(lines): +def ToCArray(byte_sequence): result = [] - for chr in lines: + for chr in byte_sequence: result.append(str(ord(chr))) - return ", ".join(result) + joined = ", ".join(result) + return textwrap.fill(joined, 80) def RemoveCommentsAndTrailingWhitespace(lines): @@ -68,46 +67,19 @@ def ReadFile(filename): return lines -def ReadLines(filename): - result = [] - for line in open(filename, "rt"): - if '#' in line: - line = line[:line.index('#')] - line = line.strip() - if len(line) > 0: - result.append(line) - return result - - -def LoadConfigFrom(name): - import ConfigParser - config = ConfigParser.ConfigParser() - config.read(name) - return config - - -def ParseValue(string): - string = string.strip() - if string.startswith('[') and string.endswith(']'): - return string.lstrip('[').rstrip(']').split() - else: - return string - - EVAL_PATTERN = re.compile(r'\beval\s*\(') WITH_PATTERN = re.compile(r'\bwith\s*\(') - -def Validate(lines, file): - lines = RemoveCommentsAndTrailingWhitespace(lines) +def Validate(lines): # Because of simplified context setup, eval and with is not # allowed in the natives files. - eval_match = EVAL_PATTERN.search(lines) - if eval_match: - raise ("Eval disallowed in natives: %s" % file) - with_match = WITH_PATTERN.search(lines) - if with_match: - raise ("With statements disallowed in natives: %s" % file) + if EVAL_PATTERN.search(lines): + raise Error("Eval disallowed in natives.") + if WITH_PATTERN.search(lines): + raise Error("With statements disallowed in natives.") + + # Pass lines through unchanged. + return lines def ExpandConstants(lines, constants): @@ -187,7 +159,7 @@ PYTHON_MACRO_PATTERN = re.compile(r'^python\s+macro\s+([a-zA-Z0-9_]+)\s*\(([^)]* def ReadMacros(lines): constants = [] macros = [] - for line in lines: + for line in lines.split('\n'): hash = line.find('#') if hash != -1: line = line[:hash] line = line.strip() @@ -213,13 +185,13 @@ def ReadMacros(lines): fun = eval("lambda " + ",".join(args) + ': ' + body) macros.append((re.compile("\\b%s\\(" % name), PythonMacro(args, fun))) else: - raise ("Illegal line: " + line) + raise Error("Illegal line: " + line) return (constants, macros) INLINE_MACRO_PATTERN = re.compile(r'macro\s+([a-zA-Z0-9_]+)\s*\(([^)]*)\)\s*\n') INLINE_MACRO_END_PATTERN = re.compile(r'endmacro\s*\n') -def ExpandInlineMacros(lines, filename): +def ExpandInlineMacros(lines): pos = 0 while True: macro_match = INLINE_MACRO_PATTERN.search(lines, pos) @@ -230,7 +202,7 @@ def ExpandInlineMacros(lines, filename): args = [match.strip() for match in macro_match.group(2).split(',')] end_macro_match = INLINE_MACRO_END_PATTERN.search(lines, macro_match.end()); if end_macro_match is None: - raise ("Macro %s unclosed in %s" % (name, filename)) + raise Error("Macro %s unclosed" % name) body = lines[macro_match.end():end_macro_match.start()] # remove macro definition @@ -245,6 +217,7 @@ def ExpandInlineMacros(lines, filename): return s lines = ExpandMacroDefinition(lines, pos, name_pattern, macro, non_expander) + HEADER_TEMPLATE = """\ // Copyright 2011 Google Inc. All Rights Reserved. 
@@ -259,7 +232,7 @@ HEADER_TEMPLATE = """\ namespace v8 { namespace internal { - static const byte sources[] = { %(sources_data)s }; +%(sources_declaration)s\ %(raw_sources_declaration)s\ @@ -311,6 +284,10 @@ namespace internal { } // v8 """ +SOURCES_DECLARATION = """\ + static const byte sources[] = { %s }; +""" + RAW_SOURCES_COMPRESSION_DECLARATION = """\ static const char* raw_sources = NULL; @@ -336,97 +313,202 @@ GET_SCRIPT_NAME_CASE = """\ if (index == %(i)i) return Vector<const char>("%(name)s", %(length)i); """ -def JS2C(source, target, env): - ids = [] - debugger_ids = [] - modules = [] - # Locate the macros file name. - consts = [] - macros = [] - for s in source: - if 'macros.py' == (os.path.split(str(s))[1]): - (consts, macros) = ReadMacros(ReadLines(str(s))) - else: - modules.append(s) - - minifier = jsmin.JavaScriptMinifier() - - module_offset = 0 - all_sources = [] - for module in modules: - filename = str(module) - debugger = filename.endswith('-debugger.js') - lines = ReadFile(filename) - lines = ExpandConstants(lines, consts) - lines = ExpandMacros(lines, macros) - lines = RemoveCommentsAndTrailingWhitespace(lines) - lines = ExpandInlineMacros(lines, filename) - Validate(lines, filename) - lines = minifier.JSMinify(lines) - id = (os.path.split(filename)[1])[:-3] - if debugger: id = id[:-9] - raw_length = len(lines) - if debugger: - debugger_ids.append((id, raw_length, module_offset)) - else: - ids.append((id, raw_length, module_offset)) - all_sources.append(lines) - module_offset += raw_length - total_length = raw_total_length = module_offset - - if env['COMPRESSION'] == 'off': - raw_sources_declaration = RAW_SOURCES_DECLARATION - sources_data = ToCAsciiArray("".join(all_sources)) + +def BuildFilterChain(macro_filename): + """Build the chain of filter functions to be applied to the sources. + + Args: + macro_filename: Name of the macro file, if any. + + Returns: + A function (string -> string) that reads a source file and processes it. + """ + filter_chain = [ReadFile] + + if macro_filename: + (consts, macros) = ReadMacros(ReadFile(macro_filename)) + filter_chain.append(lambda l: ExpandConstants(l, consts)) + filter_chain.append(lambda l: ExpandMacros(l, macros)) + + filter_chain.extend([ + RemoveCommentsAndTrailingWhitespace, + ExpandInlineMacros, + Validate, + jsmin.JavaScriptMinifier().JSMinify + ]) + + def chain(f1, f2): + return lambda x: f2(f1(x)) + + return reduce(chain, filter_chain) + + +class Sources: + def __init__(self): + self.names = [] + self.modules = [] + self.is_debugger_id = [] + + +def IsDebuggerFile(filename): + return filename.endswith("-debugger.js") + +def IsMacroFile(filename): + return filename.endswith("macros.py") + + +def PrepareSources(source_files): + """Read, prepare and assemble the list of source files. + + Args: + sources: List of Javascript-ish source files. A file named macros.py + will be treated as a list of macros. + + Returns: + An instance of Sources. + """ + macro_file = None + macro_files = filter(IsMacroFile, source_files) + assert len(macro_files) in [0, 1] + if macro_files: + source_files.remove(macro_files[0]) + macro_file = macro_files[0] + + filters = BuildFilterChain(macro_file) + + # Sort 'debugger' sources first. 
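BuildFilterChain above turns the per-source processing steps (constant expansion, macro expansion, comment stripping, inline-macro expansion, validation, minification) into a single string -> string function by composing them with reduce. A minimal sketch of that composition pattern, using stand-in filters instead of the real ones:

from functools import reduce  # a builtin on Python 2, in functools on Python 3

def strip_blank_lines(text):
    return "\n".join(line for line in text.split("\n") if line.strip())

def upper_case(text):  # stand-in for a real stage such as ExpandMacros
    return text.upper()

def build_filter_chain(filters):
    chain = lambda f1, f2: lambda x: f2(f1(x))
    return reduce(chain, filters)

process = build_filter_chain([strip_blank_lines, upper_case])
print(process("var a = 1;\n\nvar b = 2;"))  # -> "VAR A = 1;\nVAR B = 2;"

Because every stage has the same signature, adding or dropping a step (as the hunk does for the optional macro file) only changes the list handed to reduce.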
+ source_files = sorted(source_files, + lambda l,r: IsDebuggerFile(r) - IsDebuggerFile(l)) + + result = Sources() + for source in source_files: + try: + lines = filters(source) + except Error as e: + raise Error("In file %s:\n%s" % (source, str(e))) + + result.modules.append(lines); + + is_debugger = IsDebuggerFile(source) + result.is_debugger_id.append(is_debugger); + + name = os.path.basename(source)[:-3] + result.names.append(name if not is_debugger else name[:-9]); + return result + + +def BuildMetadata(sources, source_bytes, native_type, omit): + """Build the meta data required to generate a libaries file. + + Args: + sources: A Sources instance with the prepared sources. + source_bytes: A list of source bytes. + (The concatenation of all sources; might be compressed.) + native_type: The parameter for the NativesCollection template. + omit: bool, whether we should omit the sources in the output. + + Returns: + A dictionary for use with HEADER_TEMPLATE. + """ + total_length = len(source_bytes) + raw_sources = "".join(sources.modules) + + # The sources are expected to be ASCII-only. + assert not filter(lambda value: ord(value) >= 128, raw_sources) + + # Loop over modules and build up indices into the source blob: + get_index_cases = [] + get_script_name_cases = [] + get_raw_script_source_cases = [] + offset = 0 + for i in xrange(len(sources.modules)): + native_name = "native %s.js" % sources.names[i] + d = { + "i": i, + "id": sources.names[i], + "name": native_name, + "length": len(native_name), + "offset": offset, + "raw_length": len(sources.modules[i]), + } + get_index_cases.append(GET_INDEX_CASE % d) + get_script_name_cases.append(GET_SCRIPT_NAME_CASE % d) + get_raw_script_source_cases.append(GET_RAW_SCRIPT_SOURCE_CASE % d) + offset += len(sources.modules[i]) + assert offset == len(raw_sources) + + # If we have the raw sources we can declare them accordingly. + have_raw_sources = source_bytes == raw_sources and not omit + raw_sources_declaration = (RAW_SOURCES_DECLARATION + if have_raw_sources else RAW_SOURCES_COMPRESSION_DECLARATION) + + metadata = { + "builtin_count": len(sources.modules), + "debugger_count": sum(sources.is_debugger_id), + "sources_declaration": SOURCES_DECLARATION % ToCArray(source_bytes), + "sources_data": ToCArray(source_bytes) if not omit else "", + "raw_sources_declaration": raw_sources_declaration, + "raw_total_length": sum(map(len, sources.modules)), + "total_length": total_length, + "get_index_cases": "".join(get_index_cases), + "get_raw_script_source_cases": "".join(get_raw_script_source_cases), + "get_script_name_cases": "".join(get_script_name_cases), + "type": native_type, + } + return metadata + + +def CompressMaybe(sources, compression_type): + """Take the prepared sources and generate a sequence of bytes. + + Args: + sources: A Sources instance with the prepared sourced. + compression_type: string, describing the desired compression. + + Returns: + A sequence of bytes. 
+ """ + sources_bytes = "".join(sources.modules) + if compression_type == "off": + return sources_bytes + elif compression_type == "bz2": + return bz2.compress(sources_bytes) else: - raw_sources_declaration = RAW_SOURCES_COMPRESSION_DECLARATION - if env['COMPRESSION'] == 'bz2': - all_sources = bz2.compress("".join(all_sources)) - total_length = len(all_sources) - sources_data = ToCArray(all_sources) - - # Build debugger support functions - get_index_cases = [ ] - get_raw_script_source_cases = [ ] - get_script_name_cases = [ ] - - i = 0 - for (id, raw_length, module_offset) in debugger_ids + ids: - native_name = "native %s.js" % id - get_index_cases.append(GET_INDEX_CASE % { 'id': id, 'i': i }) - get_raw_script_source_cases.append(GET_RAW_SCRIPT_SOURCE_CASE % { - 'offset': module_offset, - 'raw_length': raw_length, - 'i': i - }) - get_script_name_cases.append(GET_SCRIPT_NAME_CASE % { - 'name': native_name, - 'length': len(native_name), - 'i': i - }) - i = i + 1 - - # Emit result - output = open(str(target[0]), "w") - output.write(HEADER_TEMPLATE % { - 'builtin_count': len(ids) + len(debugger_ids), - 'debugger_count': len(debugger_ids), - 'sources_data': sources_data, - 'raw_sources_declaration': raw_sources_declaration, - 'raw_total_length': raw_total_length, - 'total_length': total_length, - 'get_index_cases': "".join(get_index_cases), - 'get_raw_script_source_cases': "".join(get_raw_script_source_cases), - 'get_script_name_cases': "".join(get_script_name_cases), - 'type': env['TYPE'] - }) + raise Error("Unknown compression type %s." % compression_type) + + +def JS2C(source, target, native_type, compression_type, raw_file, omit): + sources = PrepareSources(source) + sources_bytes = CompressMaybe(sources, compression_type) + metadata = BuildMetadata(sources, sources_bytes, native_type, omit) + + # Optionally emit raw file. + if raw_file: + output = open(raw_file, "w") + output.write(sources_bytes) + output.close() + + # Emit resulting source file. + output = open(target, "w") + output.write(HEADER_TEMPLATE % metadata) output.close() + def main(): - natives = sys.argv[1] - type = sys.argv[2] - compression = sys.argv[3] - source_files = sys.argv[4:] - JS2C(source_files, [natives], { 'TYPE': type, 'COMPRESSION': compression }) + parser = optparse.OptionParser() + parser.add_option("--raw", action="store", + help="file to write the processed sources array to.") + parser.add_option("--omit", dest="omit", action="store_true", + help="Omit the raw sources from the generated code.") + parser.set_usage("""js2c out.cc type compression sources.js ... + out.cc: C code to be generated. + type: type parameter for NativesCollection template. + compression: type of compression used. 
[off|bz2] + sources.js: JS internal sources or macros.py.""") + (options, args) = parser.parse_args() + + JS2C(args[3:], args[0], args[1], args[2], options.raw, options.omit) + if __name__ == "__main__": main() diff --git a/deps/v8/tools/lexer-shell.cc b/deps/v8/tools/lexer-shell.cc index e2e4a9c25..273cdd9f4 100644 --- a/deps/v8/tools/lexer-shell.cc +++ b/deps/v8/tools/lexer-shell.cc @@ -67,16 +67,14 @@ class BaselineScanner { Handle<String> result = isolate->factory()->NewStringFromTwoByte( Vector<const uint16_t>( reinterpret_cast<const uint16_t*>(source_), - length / 2)); - CHECK_NOT_EMPTY_HANDLE(isolate, result); + length / 2)).ToHandleChecked(); stream_ = new GenericStringUtf16CharacterStream(result, 0, result->length()); break; } case LATIN1: { Handle<String> result = isolate->factory()->NewStringFromOneByte( - Vector<const uint8_t>(source_, length)); - CHECK_NOT_EMPTY_HANDLE(isolate, result); + Vector<const uint8_t>(source_, length)).ToHandleChecked(); stream_ = new GenericStringUtf16CharacterStream(result, 0, result->length()); break; diff --git a/deps/v8/tools/merge-to-branch.sh b/deps/v8/tools/merge-to-branch.sh deleted file mode 100755 index 4e8a86c83..000000000 --- a/deps/v8/tools/merge-to-branch.sh +++ /dev/null @@ -1,342 +0,0 @@ -#!/bin/bash -# Copyright 2012 the V8 project authors. All rights reserved. -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -########## Global variable definitions - -BRANCHNAME=prepare-merge -PERSISTFILE_BASENAME=/tmp/v8-merge-to-branch-tempfile -ALREADY_MERGING_SENTINEL_FILE="$PERSISTFILE_BASENAME-already-merging" -COMMIT_HASHES_FILE="$PERSISTFILE_BASENAME-PATCH_COMMIT_HASHES" -TEMPORARY_PATCH_FILE="$PERSISTFILE_BASENAME-temporary-patch" - -########## Function definitions - -source $(dirname $BASH_SOURCE)/common-includes.sh - -usage() { -cat << EOF -usage: $0 [OPTIONS]... [BRANCH] [REVISION]... - -Performs the necessary steps to merge revisions from bleeding_edge -to other branches, including trunk. - -OPTIONS: - -h Show this message - -s Specify the step where to start work. Default: 0. 
- -p Specify a patch file to apply as part of the merge - -m Specify a commit message for the patch - -r Reverse specified patches -EOF -} - -persist_patch_commit_hashes() { - echo "PATCH_COMMIT_HASHES=( ${PATCH_COMMIT_HASHES[@]} )" > $COMMIT_HASHES_FILE -} - -restore_patch_commit_hashes() { - source $COMMIT_HASHES_FILE -} - -restore_patch_commit_hashes_if_unset() { - [[ "${#PATCH_COMMIT_HASHES[@]}" == 0 ]] && restore_patch_commit_hashes - [[ "${#PATCH_COMMIT_HASHES[@]}" == 0 ]] && [[ -z "$EXTRA_PATCH" ]] && \ - die "Variable PATCH_COMMIT_HASHES could not be restored." -} - -########## Option parsing -REVERT_FROM_BLEEDING_EDGE=0 - -while getopts ":hs:fp:rm:R" OPTION ; do - case $OPTION in - h) usage - exit 0 - ;; - p) EXTRA_PATCH=$OPTARG - ;; - f) rm -f "$ALREADY_MERGING_SENTINEL_FILE" - ;; - r) REVERSE_PATCH="--reverse" - ;; - m) NEW_COMMIT_MSG=$OPTARG - ;; - s) START_STEP=$OPTARG - ;; - R) REVERSE_PATCH="--reverse" - REVERT_FROM_BLEEDING_EDGE=1 - ;; - ?) echo "Illegal option: -$OPTARG" - usage - exit 1 - ;; - esac -done -let OPTION_COUNT=$OPTIND-1 -shift $OPTION_COUNT - -########## Regular workflow - -# If there is a merge in progress, abort. -[[ -e "$ALREADY_MERGING_SENTINEL_FILE" ]] && [[ $START_STEP -eq 0 ]] \ - && die "A merge is already in progress" -touch "$ALREADY_MERGING_SENTINEL_FILE" - -initial_environment_checks - -if [ $START_STEP -le $CURRENT_STEP ] ; then - let MIN_EXPECTED_ARGS=2-$REVERT_FROM_BLEEDING_EDGE - if [ ${#@} -lt $MIN_EXPECTED_ARGS ] ; then - if [ -z "$EXTRA_PATCH" ] ; then - die "Either a patch file or revision numbers must be specified" - fi - if [ -z "$NEW_COMMIT_MSG" ] ; then - die "You must specify a merge comment if no patches are specified" - fi - fi - echo ">>> Step $CURRENT_STEP: Preparation" - if [ $REVERT_FROM_BLEEDING_EDGE -eq 1 ] ; then - MERGE_TO_BRANCH="bleeding_edge" - else - MERGE_TO_BRANCH=$1 - [[ -n "$MERGE_TO_BRANCH" ]] || die "Please specify a branch to merge to" - shift - fi - persist "MERGE_TO_BRANCH" - common_prepare -fi - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] ; then - echo ">>> Step $CURRENT_STEP: Create a fresh branch for the patch." - restore_if_unset "MERGE_TO_BRANCH" - git checkout -b $BRANCHNAME svn/$MERGE_TO_BRANCH \ - || die "Creating branch $BRANCHNAME failed." -fi - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] ; then - echo ">>> Step $CURRENT_STEP: Search for corresponding architecture ports." - for REVISION in "$@" ; do - # Add the revision to the array if it isn't already added. - if [[ ! "${FULL_REVISION_LIST[@]}" =~ (^| )$REVISION($| ) ]] ; then - FULL_REVISION_LIST=("${FULL_REVISION_LIST[@]}" "$REVISION") - fi - # Search for commits which matches the "Port rXXX" pattern. - GIT_HASHES=$(git log svn/bleeding_edge --reverse \ - --format=%H --grep="Port r$REVISION") - if [ -n "$GIT_HASHES" ]; then - while read -r NEXT_GIT_HASH; do - NEXT_SVN_REVISION=$(git svn find-rev $NEXT_GIT_HASH svn/bleeding_edge) - [[ -n "$NEXT_SVN_REVISION" ]] \ - || die "Cannot determine svn revision for $NEXT_GIT_HASH" - FULL_REVISION_LIST=("${FULL_REVISION_LIST[@]}" "$NEXT_SVN_REVISION") - REVISION_TITLE=$(git log -1 --format=%s $NEXT_GIT_HASH) - # Is this revision included in the original revision list? 
- if [[ $@ =~ (^| )$NEXT_SVN_REVISION($| ) ]] ; then - echo "Found port of r$REVISION -> \ -r$NEXT_SVN_REVISION (already included): $REVISION_TITLE" - else - echo "Found port of r$REVISION -> \ -r$NEXT_SVN_REVISION: $REVISION_TITLE" - PORT_REVISION_LIST=("${PORT_REVISION_LIST[@]}" "$NEXT_SVN_REVISION") - fi - done <<< "$GIT_HASHES" - fi - done - # Next step expects a list, not an array. - FULL_REVISION_LIST="${FULL_REVISION_LIST[@]}" - # Do we find any port? - if [ ${#PORT_REVISION_LIST[@]} -ne 0 ] ; then - confirm "Automatically add corresponding ports (${PORT_REVISION_LIST[*]})?" - #: 'n': Restore the original revision list. - if [ $? -ne 0 ] ; then - FULL_REVISION_LIST="$@" - fi - fi - persist "FULL_REVISION_LIST" -fi - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] ; then - echo ">>> Step $CURRENT_STEP: Find the git \ -revisions associated with the patches." - restore_if_unset "FULL_REVISION_LIST" - current=0 - for REVISION in $FULL_REVISION_LIST ; do - NEXT_HASH=$(git svn find-rev "r$REVISION" svn/bleeding_edge) - [[ -n "$NEXT_HASH" ]] \ - || die "Cannot determine git hash for r$REVISION" - PATCH_COMMIT_HASHES[$current]="$NEXT_HASH" - [[ -n "$REVISION_LIST" ]] && REVISION_LIST="$REVISION_LIST," - REVISION_LIST="$REVISION_LIST r$REVISION" - let current+=1 - done - if [ -n "$REVISION_LIST" ] ; then - if [ -n "$REVERSE_PATCH" ] ; then - if [ $REVERT_FROM_BLEEDING_EDGE -eq 0 ] ; then - NEW_COMMIT_MSG="Rollback of$REVISION_LIST in $MERGE_TO_BRANCH branch." - else - NEW_COMMIT_MSG="Revert$REVISION_LIST." - fi - else - NEW_COMMIT_MSG="Merged$REVISION_LIST into $MERGE_TO_BRANCH branch." - fi; - fi; - - echo "$NEW_COMMIT_MSG" > $COMMITMSG_FILE - echo "" >> $COMMITMSG_FILE - for HASH in ${PATCH_COMMIT_HASHES[@]} ; do - PATCH_MERGE_DESCRIPTION=$(git log -1 --format=%s $HASH) - echo "$PATCH_MERGE_DESCRIPTION" >> $COMMITMSG_FILE - echo "" >> $COMMITMSG_FILE - done - for HASH in ${PATCH_COMMIT_HASHES[@]} ; do - BUG=$(git log -1 $HASH | grep "BUG=" | awk -F '=' '{print $NF}') - if [ -n "$BUG" ] ; then - [[ -n "$BUG_AGGREGATE" ]] && BUG_AGGREGATE="$BUG_AGGREGATE," - BUG_AGGREGATE="$BUG_AGGREGATE$BUG" - fi - done - if [ -n "$BUG_AGGREGATE" ] ; then - echo "BUG=$BUG_AGGREGATE" >> $COMMITMSG_FILE - echo "LOG=N" >> $COMMITMSG_FILE - fi - persist "NEW_COMMIT_MSG" - persist "REVISION_LIST" - persist_patch_commit_hashes -fi - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] ; then - echo ">>> Step $CURRENT_STEP: Apply patches for selected revisions." - restore_if_unset "MERGE_TO_BRANCH" - restore_patch_commit_hashes_if_unset "PATCH_COMMIT_HASHES" - for HASH in ${PATCH_COMMIT_HASHES[@]} ; do - echo "Applying patch for $HASH to $MERGE_TO_BRANCH..." - git log -1 -p $HASH > "$TEMPORARY_PATCH_FILE" - apply_patch "$TEMPORARY_PATCH_FILE" - done - if [ -n "$EXTRA_PATCH" ] ; then - apply_patch "$EXTRA_PATCH" - fi -fi - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] && [ $REVERT_FROM_BLEEDING_EDGE -eq 0 ] ; then - echo ">>> Step $CURRENT_STEP: Prepare $VERSION_FILE." - # These version numbers are used again for creating the tag - read_and_persist_version -fi - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] && [ $REVERT_FROM_BLEEDING_EDGE -eq 0 ] ; then - echo ">>> Step $CURRENT_STEP: Increment version number." - restore_if_unset "PATCH" - NEWPATCH=$(($PATCH + 1)) - confirm "Automatically increment PATCH_LEVEL? (Saying 'n' will fire up \ -your EDITOR on $VERSION_FILE so you can make arbitrary changes. When \ -you're done, save the file and exit your EDITOR.)" - if [ $? 
-eq 0 ] ; then - echo $NEWPATCH $VERSION_FILE - sed -e "/#define PATCH_LEVEL/s/[0-9]*$/$NEWPATCH/" \ - -i.bak "$VERSION_FILE" || die "Could not increment patch level" - else - $EDITOR "$VERSION_FILE" - fi - read_and_persist_version "NEW" -fi - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] ; then - echo ">>> Step $CURRENT_STEP: Commit to local branch." - git commit -a -F "$COMMITMSG_FILE" \ - || die "'git commit -a' failed." -fi - -upload_step - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] ; then - echo ">>> Step $CURRENT_STEP: Commit to the repository." - restore_if_unset "MERGE_TO_BRANCH" - git checkout $BRANCHNAME \ - || die "cannot ensure that the current branch is $BRANCHNAME" - wait_for_lgtm - PRESUBMIT_TREE_CHECK="skip" git cl presubmit \ - || die "presubmit failed" - PRESUBMIT_TREE_CHECK="skip" git cl dcommit --bypass-hooks \ - || die "failed to commit to $MERGE_TO_BRANCH" -fi - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] && [ $REVERT_FROM_BLEEDING_EDGE -eq 0 ] ; then - echo ">>> Step $CURRENT_STEP: Determine svn commit revision" - restore_if_unset "NEW_COMMIT_MSG" - restore_if_unset "MERGE_TO_BRANCH" - git svn fetch || die "'git svn fetch' failed." - COMMIT_HASH=$(git log -1 --format=%H --grep="$NEW_COMMIT_MSG" \ - svn/$MERGE_TO_BRANCH) - [[ -z "$COMMIT_HASH" ]] && die "Unable to map git commit to svn revision" - SVN_REVISION=$(git svn find-rev $COMMIT_HASH) - echo "subversion revision number is r$SVN_REVISION" - persist "SVN_REVISION" -fi - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] && [ $REVERT_FROM_BLEEDING_EDGE -eq 0 ] ; then - echo ">>> Step $CURRENT_STEP: Create the tag." - restore_if_unset "SVN_REVISION" - restore_version_if_unset "NEW" - echo "Creating tag svn/tags/$NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH" - if [ "$MERGE_TO_BRANCH" == "trunk" ] ; then - TO_URL="$MERGE_TO_BRANCH" - else - TO_URL="branches/$MERGE_TO_BRANCH" - fi - svn copy -r $SVN_REVISION \ - https://v8.googlecode.com/svn/$TO_URL \ - https://v8.googlecode.com/svn/tags/$NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH \ - -m "Tagging version $NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH" - persist "TO_URL" -fi - -let CURRENT_STEP+=1 -if [ $START_STEP -le $CURRENT_STEP ] ; then - echo ">>> Step $CURRENT_STEP: Cleanup." 
- restore_if_unset "SVN_REVISION" - restore_if_unset "TO_URL" - restore_if_unset "REVISION_LIST" - restore_version_if_unset "NEW" - common_cleanup - if [ $REVERT_FROM_BLEEDING_EDGE==0 ] ; then - echo "*** SUMMARY ***" - echo "version: $NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH" - echo "branch: $TO_URL" - echo "svn revision: $SVN_REVISION" - [[ -n "$REVISION_LIST" ]] && echo "patches:$REVISION_LIST" - fi -fi diff --git a/deps/v8/tools/oom_dump/oom_dump.cc b/deps/v8/tools/oom_dump/oom_dump.cc index 5dfb5dff3..60e068537 100644 --- a/deps/v8/tools/oom_dump/oom_dump.cc +++ b/deps/v8/tools/oom_dump/oom_dump.cc @@ -32,8 +32,6 @@ #include <google_breakpad/processor/minidump.h> -#define ENABLE_DEBUGGER_SUPPORT - #include <v8.h> namespace { diff --git a/deps/v8/tools/parser-shell.cc b/deps/v8/tools/parser-shell.cc index 4da15fc7e..2d95918a3 100644 --- a/deps/v8/tools/parser-shell.cc +++ b/deps/v8/tools/parser-shell.cc @@ -44,15 +44,9 @@ using namespace v8::internal; -enum TestMode { - PreParseAndParse, - PreParse, - Parse -}; - std::pair<TimeDelta, TimeDelta> RunBaselineParser( const char* fname, Encoding encoding, int repeat, v8::Isolate* isolate, - v8::Handle<v8::Context> context, TestMode test_mode) { + v8::Handle<v8::Context> context) { int length = 0; const byte* source = ReadFileAndRepeat(fname, &length, repeat); v8::Handle<v8::String> source_handle; @@ -73,42 +67,41 @@ std::pair<TimeDelta, TimeDelta> RunBaselineParser( break; } } - v8::ScriptData* cached_data = NULL; - TimeDelta preparse_time, parse_time; - if (test_mode == PreParseAndParse || test_mode == PreParse) { + TimeDelta parse_time1, parse_time2; + Handle<Script> script = Isolate::Current()->factory()->NewScript( + v8::Utils::OpenHandle(*source_handle)); + i::ScriptData* cached_data_impl = NULL; + // First round of parsing (produce data to cache). + { + CompilationInfoWithZone info(script); + info.MarkAsGlobal(); + info.SetCachedData(&cached_data_impl, i::PRODUCE_CACHED_DATA); ElapsedTimer timer; timer.Start(); - cached_data = v8::ScriptData::PreCompile(source_handle); - preparse_time = timer.Elapsed(); - if (cached_data == NULL || cached_data->HasError()) { - fprintf(stderr, "Preparsing failed\n"); + // Allow lazy parsing; otherwise we won't produce cached data. + bool success = Parser::Parse(&info, true); + parse_time1 = timer.Elapsed(); + if (!success) { + fprintf(stderr, "Parsing failed\n"); return std::make_pair(TimeDelta(), TimeDelta()); } } - if (test_mode == PreParseAndParse || test_mode == Parse) { - Handle<String> str = v8::Utils::OpenHandle(*source_handle); - i::Isolate* internal_isolate = str->GetIsolate(); - Handle<Script> script = internal_isolate->factory()->NewScript(str); + // Second round of parsing (consume cached data). + { CompilationInfoWithZone info(script); info.MarkAsGlobal(); - i::ScriptDataImpl* cached_data_impl = - static_cast<i::ScriptDataImpl*>(cached_data); - if (test_mode == PreParseAndParse) { - info.SetCachedData(&cached_data_impl, - i::CONSUME_CACHED_DATA); - } - info.SetContext(v8::Utils::OpenHandle(*context)); + info.SetCachedData(&cached_data_impl, i::CONSUME_CACHED_DATA); ElapsedTimer timer; timer.Start(); - // Allow lazy parsing; otherwise the preparse data won't help. + // Allow lazy parsing; otherwise cached data won't help. 
bool success = Parser::Parse(&info, true); - parse_time = timer.Elapsed(); + parse_time2 = timer.Elapsed(); if (!success) { fprintf(stderr, "Parsing failed\n"); return std::make_pair(TimeDelta(), TimeDelta()); } } - return std::make_pair(preparse_time, parse_time); + return std::make_pair(parse_time1, parse_time2); } @@ -116,7 +109,6 @@ int main(int argc, char* argv[]) { v8::V8::InitializeICU(); v8::V8::SetFlagsFromCommandLine(&argc, argv, true); Encoding encoding = LATIN1; - TestMode test_mode = PreParseAndParse; std::vector<std::string> fnames; std::string benchmark; int repeat = 1; @@ -127,12 +119,6 @@ int main(int argc, char* argv[]) { encoding = UTF8; } else if (strcmp(argv[i], "--utf16") == 0) { encoding = UTF16; - } else if (strcmp(argv[i], "--preparse-and-parse") == 0) { - test_mode = PreParseAndParse; - } else if (strcmp(argv[i], "--preparse") == 0) { - test_mode = PreParse; - } else if (strcmp(argv[i], "--parse") == 0) { - test_mode = Parse; } else if (strncmp(argv[i], "--benchmark=", 12) == 0) { benchmark = std::string(argv[i]).substr(12); } else if (strncmp(argv[i], "--repeat=", 9) == 0) { @@ -150,20 +136,19 @@ int main(int argc, char* argv[]) { ASSERT(!context.IsEmpty()); { v8::Context::Scope scope(context); - double preparse_total = 0; - double parse_total = 0; + double first_parse_total = 0; + double second_parse_total = 0; for (size_t i = 0; i < fnames.size(); i++) { std::pair<TimeDelta, TimeDelta> time = RunBaselineParser( - fnames[i].c_str(), encoding, repeat, isolate, context, test_mode); - preparse_total += time.first.InMillisecondsF(); - parse_total += time.second.InMillisecondsF(); + fnames[i].c_str(), encoding, repeat, isolate, context); + first_parse_total += time.first.InMillisecondsF(); + second_parse_total += time.second.InMillisecondsF(); } if (benchmark.empty()) benchmark = "Baseline"; - printf("%s(PreParseRunTime): %.f ms\n", benchmark.c_str(), - preparse_total); - printf("%s(ParseRunTime): %.f ms\n", benchmark.c_str(), parse_total); - printf("%s(RunTime): %.f ms\n", benchmark.c_str(), - preparse_total + parse_total); + printf("%s(FirstParseRunTime): %.f ms\n", benchmark.c_str(), + first_parse_total); + printf("%s(SecondParseRunTime): %.f ms\n", benchmark.c_str(), + second_parse_total); } } v8::V8::Dispose(); diff --git a/deps/v8/tools/push-to-trunk/auto_push.py b/deps/v8/tools/push-to-trunk/auto_push.py index 9a43c3f5b..aeaea805b 100755 --- a/deps/v8/tools/push-to-trunk/auto_push.py +++ b/deps/v8/tools/push-to-trunk/auto_push.py @@ -39,7 +39,7 @@ import push_to_trunk SETTINGS_LOCATION = "SETTINGS_LOCATION" CONFIG = { - PERSISTFILE_BASENAME: "/tmp/v8-auto-roll-tempfile", + PERSISTFILE_BASENAME: "/tmp/v8-auto-push-tempfile", DOT_GIT_LOCATION: ".git", SETTINGS_LOCATION: "~/.auto-roll", } diff --git a/deps/v8/tools/push-to-trunk/auto_roll.py b/deps/v8/tools/push-to-trunk/auto_roll.py new file mode 100755 index 000000000..607ca0897 --- /dev/null +++ b/deps/v8/tools/push-to-trunk/auto_roll.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python +# Copyright 2014 the V8 project authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import json +import os +import sys +import urllib + +from common_includes import * +import chromium_roll + +CONFIG = { + PERSISTFILE_BASENAME: "/tmp/v8-auto-roll-tempfile", +} + +CR_DEPS_URL = 'http://src.chromium.org/svn/trunk/src/DEPS' + +class CheckActiveRoll(Step): + MESSAGE = "Check active roll." 
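The new auto_roll.py (its header is above; the step bodies continue below) decides whether to roll by first checking for an in-flight roll CL: it queries the codereview search endpoint and scans recent CL subjects for the "Update V8 to" prefix. A rough sketch of that subject check, assuming a decoded JSON payload shaped the way the script expects:

import json

def contains_chromium_roll(changes):
    # Any recent CL whose subject starts with "Update V8 to" counts as an
    # active roll, mirroring CheckActiveRoll.ContainsChromiumRoll below.
    return any(change["subject"].startswith("Update V8 to") for change in changes)

# Hypothetical search result; the real data comes from
# https://codereview.chromium.org/search with format=json.
result = json.loads('{"results": [{"subject": "Update V8 to version 3.26.33."}]}')
print(contains_chromium_roll(result["results"]))  # -> True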
+ + @staticmethod + def ContainsChromiumRoll(changes): + for change in changes: + if change["subject"].startswith("Update V8 to"): + return True + return False + + def RunStep(self): + params = { + "closed": 3, + "owner": self._options.author, + "limit": 30, + "format": "json", + } + params = urllib.urlencode(params) + search_url = "https://codereview.chromium.org/search" + result = self.ReadURL(search_url, params, wait_plan=[5, 20]) + if self.ContainsChromiumRoll(json.loads(result)["results"]): + print "Stop due to existing Chromium roll." + return True + + +class DetectLastPush(Step): + MESSAGE = "Detect commit ID of the last push to trunk." + + def RunStep(self): + push_hash = self.FindLastTrunkPush(include_patches=True) + self["last_push"] = self.GitSVNFindSVNRev(push_hash) + + +class DetectLastRoll(Step): + MESSAGE = "Detect commit ID of the last Chromium roll." + + def RunStep(self): + # Interpret the DEPS file to retrieve the v8 revision. + Var = lambda var: '%s' + exec(self.ReadURL(CR_DEPS_URL)) + last_roll = vars['v8_revision'] + if last_roll >= self["last_push"]: + print("There is no newer v8 revision than the one in Chromium (%s)." + % last_roll) + return True + + +class RollChromium(Step): + MESSAGE = "Roll V8 into Chromium." + + def RunStep(self): + if self._options.roll: + args = [ + "--author", self._options.author, + "--reviewer", self._options.reviewer, + "--chromium", self._options.chromium, + "--force", + ] + if self._options.sheriff: + args.extend([ + "--sheriff", "--googlers-mapping", self._options.googlers_mapping]) + R = chromium_roll.ChromiumRoll + self._side_effect_handler.Call( + R(chromium_roll.CONFIG, self._side_effect_handler).Run, + args) + + +class AutoRoll(ScriptsBase): + def _PrepareOptions(self, parser): + parser.add_argument("-c", "--chromium", required=True, + help=("The path to your Chromium src/ " + "directory to automate the V8 roll.")) + parser.add_argument("--roll", + help="Make Chromium roll. Dry run if unspecified.", + default=False, action="store_true") + + def _ProcessOptions(self, options): # pragma: no cover + if not options.reviewer: + print "A reviewer (-r) is required." + return False + if not options.author: + print "An author (-a) is required." + return False + return True + + def _Steps(self): + return [ + CheckActiveRoll, + DetectLastPush, + DetectLastRoll, + RollChromium, + ] + + +if __name__ == "__main__": # pragma: no cover + sys.exit(AutoRoll(CONFIG).Run()) diff --git a/deps/v8/tools/push-to-trunk/chromium_roll.py b/deps/v8/tools/push-to-trunk/chromium_roll.py index ef9b8bf38..35ab24b05 100755 --- a/deps/v8/tools/push-to-trunk/chromium_roll.py +++ b/deps/v8/tools/push-to-trunk/chromium_roll.py @@ -30,7 +30,8 @@ class DetectLastPush(Step): MESSAGE = "Detect commit ID of last push to trunk." def RunStep(self): - self["last_push"] = self._options.last_push or self.FindLastTrunkPush() + self["last_push"] = self._options.last_push or self.FindLastTrunkPush( + include_patches=True) self["trunk_revision"] = self.GitSVNFindSVNRev(self["last_push"]) self["push_title"] = self.GitLog(n=1, format="%s", git_hash=self["last_push"]) @@ -89,7 +90,7 @@ class UploadCL(Step): deps) TextToFile(deps, self.Config(DEPS_FILE)) - if self._options.reviewer: + if self._options.reviewer and not self._options.manual: print "Using account %s for review." % self._options.reviewer rev = self._options.reviewer else: @@ -98,7 +99,11 @@ class UploadCL(Step): rev = self.ReadLine() commit_title = "Update V8 to %s." 
% self["push_title"].lower() - self.GitCommit("%s\n\nTBR=%s" % (commit_title, rev)) + sheriff = "" + if self["sheriff"]: + sheriff = ("\n\nPlease reply to the V8 sheriff %s in case of problems." + % self["sheriff"]) + self.GitCommit("%s%s\n\nTBR=%s" % (commit_title, sheriff, rev)) self.GitUpload(author=self._options.author, force=self._options.force_upload) print "CL uploaded." @@ -158,6 +163,7 @@ class ChromiumRoll(ScriptsBase): Preparation, DetectLastPush, CheckChromium, + DetermineV8Sheriff, SwitchChromium, UpdateChromiumCheckout, UploadCL, diff --git a/deps/v8/tools/push-to-trunk/common_includes.py b/deps/v8/tools/push-to-trunk/common_includes.py index 39b689134..482509f7d 100644 --- a/deps/v8/tools/push-to-trunk/common_includes.py +++ b/deps/v8/tools/push-to-trunk/common_includes.py @@ -28,6 +28,7 @@ import argparse import datetime +import imp import json import os import re @@ -38,9 +39,9 @@ import time import urllib2 from git_recipes import GitRecipesMixin +from git_recipes import GitFailedException PERSISTFILE_BASENAME = "PERSISTFILE_BASENAME" -TEMP_BRANCH = "TEMP_BRANCH" BRANCHNAME = "BRANCHNAME" DOT_GIT_LOCATION = "DOT_GIT_LOCATION" VERSION_FILE = "VERSION_FILE" @@ -174,6 +175,7 @@ def Command(cmd, args="", prefix="", pipe=True): # TODO(machenbach): Use timeout. cmd_line = "%s %s %s" % (prefix, cmd, args) print "Command: %s" % cmd_line + sys.stdout.flush() try: if pipe: return subprocess.check_output(cmd_line, shell=True) @@ -181,6 +183,9 @@ def Command(cmd, args="", prefix="", pipe=True): return subprocess.check_call(cmd_line, shell=True) except subprocess.CalledProcessError: return None + finally: + sys.stdout.flush() + sys.stderr.flush() # Wrapper for side effects. @@ -215,10 +220,6 @@ class NoRetryException(Exception): pass -class GitFailedException(Exception): - pass - - class Step(GitRecipesMixin): def __init__(self, text, requires, number, config, state, options, handler): self._text = text @@ -257,10 +258,11 @@ class Step(GitRecipesMixin): return print ">>> Step %d: %s" % (self._number, self._text) - self.RunStep() - - # Persist state. - TextToFile(json.dumps(self._state), state_file) + try: + return self.RunStep() + finally: + # Persist state. + TextToFile(json.dumps(self._state), state_file) def RunStep(self): # pragma: no cover raise NotImplementedError @@ -377,18 +379,11 @@ class Step(GitRecipesMixin): self.GitSVNFetch() def PrepareBranch(self): - # Get ahold of a safe temporary branch and check it out. - if self["current_branch"] != self._config[TEMP_BRANCH]: - self.DeleteBranch(self._config[TEMP_BRANCH]) - self.GitCreateBranch(self._config[TEMP_BRANCH]) - # Delete the branch that will be created later if it exists already. self.DeleteBranch(self._config[BRANCHNAME]) def CommonCleanup(self): self.GitCheckout(self["current_branch"]) - if self._config[TEMP_BRANCH] != self["current_branch"]: - self.GitDeleteBranch(self._config[TEMP_BRANCH]) if self._config[BRANCHNAME] != self["current_branch"]: self.GitDeleteBranch(self._config[BRANCHNAME]) @@ -441,8 +436,12 @@ class Step(GitRecipesMixin): except GitFailedException: self.WaitForResolvingConflicts(patch_file) - def FindLastTrunkPush(self, parent_hash=""): - push_pattern = "^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]* (based" + def FindLastTrunkPush(self, parent_hash="", include_patches=False): + push_pattern = "^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]*" + if not include_patches: + # Non-patched versions only have three numbers followed by the "(based + # on...) comment." 
+ push_pattern += " (based" branch = "" if parent_hash else "svn/trunk" return self.GitLog(n=1, format="%H", grep=push_pattern, parent_hash=parent_hash, branch=branch) @@ -462,6 +461,40 @@ class UploadStep(Step): self.GitUpload(reviewer, self._options.author, self._options.force_upload) +class DetermineV8Sheriff(Step): + MESSAGE = "Determine the V8 sheriff for code review." + + def RunStep(self): + self["sheriff"] = None + if not self._options.sheriff: # pragma: no cover + return + + try: + # The googlers mapping maps @google.com accounts to @chromium.org + # accounts. + googlers = imp.load_source('googlers_mapping', + self._options.googlers_mapping) + googlers = googlers.list_to_dict(googlers.get_list()) + except: # pragma: no cover + print "Skip determining sheriff without googler mapping." + return + + # The sheriff determined by the rotation on the waterfall has a + # @google.com account. + url = "https://chromium-build.appspot.com/p/chromium/sheriff_v8.js" + match = re.match(r"document\.write\('(\w+)'\)", self.ReadURL(url)) + + # If "channel is sheriff", we can't match an account. + if match: + g_name = match.group(1) + self["sheriff"] = googlers.get(g_name + "@google.com", + g_name + "@chromium.org") + self._options.reviewer = self["sheriff"] + print "Found active sheriff: %s" % self["sheriff"] + else: + print "No active sheriff found." + + def MakeStep(step_class=Step, number=0, state=None, config=None, options=None, side_effect_handler=DEFAULT_SIDE_EFFECT_HANDLER): # Allow to pass in empty dictionaries. @@ -506,11 +539,17 @@ class ScriptsBase(object): parser = argparse.ArgumentParser(description=self._Description()) parser.add_argument("-a", "--author", default="", help="The author email used for rietveld.") + parser.add_argument("-g", "--googlers-mapping", + help="Path to the script mapping google accounts.") parser.add_argument("-r", "--reviewer", default="", help="The account name to be used for reviews.") + parser.add_argument("--sheriff", default=False, action="store_true", + help=("Determine current sheriff to review CLs. On " + "success, this will overwrite the reviewer " + "option.")) parser.add_argument("-s", "--step", - help="Specify the step where to start work. Default: 0.", - default=0, type=int) + help="Specify the step where to start work. Default: 0.", + default=0, type=int) self._PrepareOptions(parser) @@ -524,6 +563,10 @@ class ScriptsBase(object): print "Bad step number %d" % options.step parser.print_help() return None + if options.sheriff and not options.googlers_mapping: # pragma: no cover + print "To determine the current sheriff, requires the googler mapping" + parser.print_help() + return None # Defaults for options, common to all scripts. 
options.manual = getattr(options, "manual", True) @@ -555,7 +598,8 @@ class ScriptsBase(object): steps.append(MakeStep(step_class, number, self._state, self._config, options, self._side_effect_handler)) for step in steps[options.step:]: - step.Run() + if step.Run(): + return 1 return 0 def Run(self, args=None): diff --git a/deps/v8/tools/push-to-trunk/git_recipes.py b/deps/v8/tools/push-to-trunk/git_recipes.py index 8e84d4533..8c1e314d7 100644 --- a/deps/v8/tools/push-to-trunk/git_recipes.py +++ b/deps/v8/tools/push-to-trunk/git_recipes.py @@ -28,6 +28,11 @@ import re + +class GitFailedException(Exception): + pass + + def Strip(f): def new_f(*args, **kwargs): return f(*args, **kwargs).strip() @@ -59,6 +64,13 @@ class GitRecipesMixin(object): assert name self.Git(MakeArgs(["branch -D", name])) + def GitReset(self, name): + assert name + self.Git(MakeArgs(["reset --hard", name])) + + def GitRemotes(self): + return map(str.strip, self.Git(MakeArgs(["branch -r"])).splitlines()) + def GitCheckout(self, name): assert name self.Git(MakeArgs(["checkout -f", name])) @@ -68,6 +80,26 @@ class GitRecipesMixin(object): assert branch_or_hash self.Git(MakeArgs(["checkout -f", branch_or_hash, "--", name])) + def GitCheckoutFileSafe(self, name, branch_or_hash): + try: + self.GitCheckoutFile(name, branch_or_hash) + except GitFailedException: # pragma: no cover + # The file doesn't exist in that revision. + return False + return True + + def GitChangedFiles(self, git_hash): + assert git_hash + try: + files = self.Git(MakeArgs(["diff --name-only", + git_hash, + "%s^" % git_hash])) + return map(str.strip, files.splitlines()) + except GitFailedException: # pragma: no cover + # Git fails using "^" at branch roots. + return [] + + @Strip def GitCurrentBranch(self): for line in self.Git("status -s -b -uno").strip().splitlines(): @@ -85,7 +117,7 @@ class GitRecipesMixin(object): if format: args.append("--format=%s" % format) if grep: - args.append("--grep=\"%s\"" % grep) + args.append("--grep=\"%s\"" % grep.replace("\"", "\\\"")) if reverse: args.append("--reverse") if git_hash: @@ -99,6 +131,7 @@ class GitRecipesMixin(object): assert git_hash return self.Git(MakeArgs(["log", "-1", "-p", git_hash])) + # TODO(machenbach): Unused? Remove. def GitAdd(self, name): assert name self.Git(MakeArgs(["add", Quoted(name)])) @@ -147,6 +180,7 @@ class GitRecipesMixin(object): def GitSVNFetch(self): self.Git("svn fetch") + # TODO(machenbach): Unused? Remove. @Strip def GitSVNLog(self): return self.Git("svn log -1 --oneline") diff --git a/deps/v8/tools/push-to-trunk/merge_to_branch.py b/deps/v8/tools/push-to-trunk/merge_to_branch.py index f0acd143e..bd9531fb9 100755 --- a/deps/v8/tools/push-to-trunk/merge_to_branch.py +++ b/deps/v8/tools/push-to-trunk/merge_to_branch.py @@ -41,7 +41,6 @@ CONFIG = { PERSISTFILE_BASENAME: "/tmp/v8-merge-to-branch-tempfile", ALREADY_MERGING_SENTINEL_FILE: "/tmp/v8-merge-to-branch-tempfile-already-merging", - TEMP_BRANCH: "prepare-merge-temporary-branch-created-by-script", DOT_GIT_LOCATION: ".git", VERSION_FILE: "src/version.cc", TEMPORARY_PATCH_FILE: "/tmp/v8-prepare-merge-tempfile-temporary-patch", @@ -134,16 +133,8 @@ class FindGitRevisions(Step): if not self["revision_list"]: # pragma: no cover self.Die("Revision list is empty.") - if self._options.revert: - if not self._options.revert_bleeding_edge: - self["new_commit_msg"] = ("Rollback of %s in %s branch." - % (self["revision_list"], self["merge_to_branch"])) - else: - self["new_commit_msg"] = "Revert %s." 
% self["revision_list"] - else: - self["new_commit_msg"] = ("Merged %s into %s branch." - % (self["revision_list"], self["merge_to_branch"])) - self["new_commit_msg"] += "\n\n" + # The commit message title is added below after the version is specified. + self["new_commit_msg"] = "" for commit_hash in self["patch_commit_hashes"]: patch_merge_desc = self.GitLog(n=1, format="%s", git_hash=commit_hash) @@ -155,10 +146,9 @@ class FindGitRevisions(Step): for bug in re.findall(r"^[ \t]*BUG[ \t]*=[ \t]*(.*?)[ \t]*$", msg, re.M): bugs.extend(map(lambda s: s.strip(), bug.split(","))) - bug_aggregate = ",".join(sorted(bugs)) + bug_aggregate = ",".join(sorted(filter(lambda s: s and s != "none", bugs))) if bug_aggregate: self["new_commit_msg"] += "BUG=%s\nLOG=N\n" % bug_aggregate - TextToFile(self["new_commit_msg"], self.Config(COMMITMSG_FILE)) class ApplyPatches(Step): @@ -181,7 +171,7 @@ class PrepareVersion(Step): def RunStep(self): if self._options.revert_bleeding_edge: return - # These version numbers are used again for creating the tag + # This is used to calculate the patch level increment. self.ReadAndPersistVersion() @@ -204,12 +194,28 @@ class IncrementVersion(Step): else: self.Editor(self.Config(VERSION_FILE)) self.ReadAndPersistVersion("new_") + self["version"] = "%s.%s.%s.%s" % (self["new_major"], + self["new_minor"], + self["new_build"], + self["new_patch"]) class CommitLocal(Step): MESSAGE = "Commit to local branch." def RunStep(self): + # Add a commit message title. + if self._options.revert: + if not self._options.revert_bleeding_edge: + title = ("Version %s (rollback of %s)" + % (self["version"], self["revision_list"])) + else: + title = "Revert %s." % self["revision_list"] + else: + title = ("Version %s (merged %s)" + % (self["version"], self["revision_list"])) + self["new_commit_msg"] = "%s\n\n%s" % (title, self["new_commit_msg"]) + TextToFile(self["new_commit_msg"], self.Config(COMMITMSG_FILE)) self.GitCommit(file_name=self.Config(COMMITMSG_FILE)) @@ -244,10 +250,6 @@ class TagRevision(Step): def RunStep(self): if self._options.revert_bleeding_edge: return - self["version"] = "%s.%s.%s.%s" % (self["new_major"], - self["new_minor"], - self["new_build"], - self["new_patch"]) print "Creating tag svn/tags/%s" % self["version"] if self["merge_to_branch"] == "trunk": self["to_url"] = "trunk" diff --git a/deps/v8/tools/push-to-trunk/push_to_trunk.py b/deps/v8/tools/push-to-trunk/push_to_trunk.py index b487b0f8f..c317bdc73 100755 --- a/deps/v8/tools/push-to-trunk/push_to_trunk.py +++ b/deps/v8/tools/push-to-trunk/push_to_trunk.py @@ -39,7 +39,6 @@ CONFIG = { BRANCHNAME: "prepare-push", TRUNKBRANCH: "trunk-push", PERSISTFILE_BASENAME: "/tmp/v8-push-to-trunk-tempfile", - TEMP_BRANCH: "prepare-push-temporary-branch-created-by-script", DOT_GIT_LOCATION: ".git", VERSION_FILE: "src/version.cc", CHANGELOG_FILE: "ChangeLog", @@ -58,6 +57,11 @@ class Preparation(Step): def RunStep(self): self.InitialEnvironmentChecks() self.CommonPrepare() + + if(self["current_branch"] == self.Config(TRUNKBRANCH) + or self["current_branch"] == self.Config(BRANCHNAME)): + print "Warning: Script started on branch %s" % self["current_branch"] + self.PrepareBranch() self.DeleteBranch(self.Config(TRUNKBRANCH)) diff --git a/deps/v8/tools/push-to-trunk/releases.py b/deps/v8/tools/push-to-trunk/releases.py new file mode 100755 index 000000000..2a22b912e --- /dev/null +++ b/deps/v8/tools/push-to-trunk/releases.py @@ -0,0 +1,463 @@ +#!/usr/bin/env python +# Copyright 2014 the V8 project authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This script retrieves the history of all V8 branches and trunk revisions and +# their corresponding Chromium revisions. + +# Requires a chromium checkout with branch heads: +# gclient sync --with_branch_heads +# gclient fetch + +import argparse +import csv +import itertools +import json +import os +import re +import sys + +from common_includes import * + +DEPS_FILE = "DEPS_FILE" +CHROMIUM = "CHROMIUM" + +CONFIG = { + BRANCHNAME: "retrieve-v8-releases", + PERSISTFILE_BASENAME: "/tmp/v8-releases-tempfile", + DOT_GIT_LOCATION: ".git", + VERSION_FILE: "src/version.cc", + DEPS_FILE: "DEPS", +} + +# Expression for retrieving the bleeding edge revision from a commit message. +PUSH_MESSAGE_RE = re.compile(r".* \(based on bleeding_edge revision r(\d+)\)$") + +# Expression for retrieving the merged patches from a merge commit message +# (old and new format). +MERGE_MESSAGE_RE = re.compile(r"^.*[M|m]erged (.+)(\)| into).*$", re.M) + +# Expression for retrieving reverted patches from a commit message (old and +# new format). +ROLLBACK_MESSAGE_RE = re.compile(r"^.*[R|r]ollback of (.+)(\)| in).*$", re.M) + +# Expression for retrieving the code review link. +REVIEW_LINK_RE = re.compile(r"^Review URL: (.+)$", re.M) + +# Expression with three versions (historical) for extracting the v8 revision +# from the chromium DEPS file. +DEPS_RE = re.compile(r'^\s*(?:"v8_revision": "' + '|\(Var\("googlecode_url"\) % "v8"\) \+ "\/trunk@' + '|"http\:\/\/v8\.googlecode\.com\/svn\/trunk@)' + '([0-9]+)".*$', re.M) + + +def SortingKey(version): + """Key for sorting version number strings: '3.11' > '3.2.1.1'""" + version_keys = map(int, version.split(".")) + # Fill up to full version numbers to normalize comparison. + while len(version_keys) < 4: + version_keys.append(0) + # Fill digits. + return ".".join(map("{0:03d}".format, version_keys)) + + +def SortBranches(branches): + """Sort branches with version number names.""" + return sorted(branches, key=SortingKey, reverse=True) + + +def FilterDuplicatesAndReverse(cr_releases): + """Returns the chromium releases in reverse order filtered by v8 revision + duplicates. + + cr_releases is a list of [cr_rev, v8_rev] reverse-sorted by cr_rev. + """ + last = "" + result = [] + for release in reversed(cr_releases): + if last == release[1]: + continue + last = release[1] + result.append(release) + return result + + +def BuildRevisionRanges(cr_releases): + """Returns a mapping of v8 revision -> chromium ranges. + The ranges are comma-separated, each range has the form R1:R2. The newest + entry is the only one of the form R1, as there is no end range. + + cr_releases is a list of [cr_rev, v8_rev] reverse-sorted by cr_rev. + cr_rev either refers to a chromium svn revision or a chromium branch number. + """ + range_lists = {} + cr_releases = FilterDuplicatesAndReverse(cr_releases) + + # Visit pairs of cr releases from oldest to newest. + for cr_from, cr_to in itertools.izip( + cr_releases, itertools.islice(cr_releases, 1, None)): + + # Assume the chromium revisions are all different. + assert cr_from[0] != cr_to[0] + + # TODO(machenbach): Subtraction is not git friendly. + ran = "%s:%d" % (cr_from[0], int(cr_to[0]) - 1) + + # Collect the ranges in lists per revision. + range_lists.setdefault(cr_from[1], []).append(ran) + + # Add the newest revision. 
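SortingKey above makes version strings comparable as plain strings by padding them to four components and zero-filling each one, so '3.11' sorts after '3.2.1.1'. Restated in isolation with a quick check:

def sorting_key(version):
    # '3.11' -> '003.011.000.000', '3.2.1.1' -> '003.002.001.001'
    version_keys = list(map(int, version.split(".")))
    while len(version_keys) < 4:
        version_keys.append(0)
    return ".".join("{0:03d}".format(k) for k in version_keys)

print(sorted(["3.2.1.1", "3.11", "2.25"], key=sorting_key, reverse=True))
# -> ['3.11', '3.2.1.1', '2.25']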
+ if cr_releases: + range_lists.setdefault(cr_releases[-1][1], []).append(cr_releases[-1][0]) + + # Stringify and comma-separate the range lists. + return dict((rev, ", ".join(ran)) for rev, ran in range_lists.iteritems()) + + +def MatchSafe(match): + if match: + return match.group(1) + else: + return "" + + +class Preparation(Step): + MESSAGE = "Preparation." + + def RunStep(self): + self.CommonPrepare() + self.PrepareBranch() + + +class RetrieveV8Releases(Step): + MESSAGE = "Retrieve all V8 releases." + + def ExceedsMax(self, releases): + return (self._options.max_releases > 0 + and len(releases) > self._options.max_releases) + + def GetBleedingEdgeFromPush(self, title): + return MatchSafe(PUSH_MESSAGE_RE.match(title)) + + def GetMergedPatches(self, body): + patches = MatchSafe(MERGE_MESSAGE_RE.search(body)) + if not patches: + patches = MatchSafe(ROLLBACK_MESSAGE_RE.search(body)) + if patches: + # Indicate reverted patches with a "-". + patches = "-%s" % patches + return patches + + def GetRelease(self, git_hash, branch): + self.ReadAndPersistVersion() + base_version = [self["major"], self["minor"], self["build"]] + version = ".".join(base_version) + body = self.GitLog(n=1, format="%B", git_hash=git_hash) + + patches = "" + if self["patch"] != "0": + version += ".%s" % self["patch"] + patches = self.GetMergedPatches(body) + + title = self.GitLog(n=1, format="%s", git_hash=git_hash) + revision = self.GitSVNFindSVNRev(git_hash) + return { + # The SVN revision on the branch. + "revision": revision, + # The SVN revision on bleeding edge (only for newer trunk pushes). + "bleeding_edge": self.GetBleedingEdgeFromPush(title), + # The branch name. + "branch": branch, + # The version for displaying in the form 3.26.3 or 3.26.3.12. + "version": version, + # The date of the commit. + "date": self.GitLog(n=1, format="%ci", git_hash=git_hash), + # Merged patches if available in the form 'r1234, r2345'. + "patches_merged": patches, + # Default for easier output formatting. + "chromium_revision": "", + # Default for easier output formatting. + "chromium_branch": "", + # Link to the CL on code review. Trunk pushes are not uploaded, so this + # field will be populated below with the recent roll CL link. + "review_link": MatchSafe(REVIEW_LINK_RE.search(body)), + # Link to the commit message on google code. + "revision_link": ("https://code.google.com/p/v8/source/detail?r=%s" + % revision), + }, self["patch"] + + def GetReleasesFromBranch(self, branch): + self.GitReset("svn/%s" % branch) + releases = [] + try: + for git_hash in self.GitLog(format="%H").splitlines(): + if self._config[VERSION_FILE] not in self.GitChangedFiles(git_hash): + continue + if self.ExceedsMax(releases): + break # pragma: no cover + if not self.GitCheckoutFileSafe(self._config[VERSION_FILE], git_hash): + break # pragma: no cover + + release, patch_level = self.GetRelease(git_hash, branch) + releases.append(release) + + # Follow branches only until their creation point. + # TODO(machenbach): This omits patches if the version file wasn't + # manipulated correctly. Find a better way to detect the point where + # the parent of the branch head leads to the trunk branch. + if branch != "trunk" and patch_level == "0": + break + + # Allow Ctrl-C interrupt. + except (KeyboardInterrupt, SystemExit): # pragma: no cover + pass + + # Clean up checked-out version file. 
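GetReleasesFromBranch above only inspects commits that actually touch the version file, using the new GitChangedFiles helper (git diff --name-only <hash> <hash>^, with branch roots treated as having no changed files). A stand-alone sketch of that filter, shelling out to git directly and assuming it runs inside some git checkout:

import subprocess

def changed_files(git_hash):
    # Rough equivalent of GitChangedFiles above; at a branch root the ^ parent
    # does not exist and git fails, in which case no files are reported.
    try:
        out = subprocess.check_output(
            ["git", "diff", "--name-only", git_hash, "%s^" % git_hash])
    except subprocess.CalledProcessError:
        return []
    return [line.strip() for line in out.decode("utf-8").splitlines()]

def version_commits(version_file="src/version.cc"):
    log = subprocess.check_output(["git", "log", "--format=%H"]).decode("utf-8")
    return [h for h in log.splitlines() if version_file in changed_files(h)]

Calling version_commits() inside a V8 checkout would return roughly the set of hashes that the releases script turns into release records.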
+ self.GitCheckoutFileSafe(self._config[VERSION_FILE], "HEAD") + return releases + + def RunStep(self): + self.GitCreateBranch(self._config[BRANCHNAME]) + # Get relevant remote branches, e.g. "svn/3.25". + branches = filter(lambda s: re.match(r"^svn/\d+\.\d+$", s), + self.GitRemotes()) + # Remove 'svn/' prefix. + branches = map(lambda s: s[4:], branches) + + releases = [] + if self._options.branch == 'recent': + # Get only recent development on trunk, beta and stable. + if self._options.max_releases == 0: # pragma: no cover + self._options.max_releases = 10 + beta, stable = SortBranches(branches)[0:2] + releases += self.GetReleasesFromBranch(stable) + releases += self.GetReleasesFromBranch(beta) + releases += self.GetReleasesFromBranch("trunk") + elif self._options.branch == 'all': # pragma: no cover + # Retrieve the full release history. + for branch in branches: + releases += self.GetReleasesFromBranch(branch) + releases += self.GetReleasesFromBranch("trunk") + else: # pragma: no cover + # Retrieve history for a specified branch. + assert self._options.branch in branches + ["trunk"] + releases += self.GetReleasesFromBranch(self._options.branch) + + self["releases"] = sorted(releases, + key=lambda r: SortingKey(r["version"]), + reverse=True) + + +# TODO(machenbach): Parts of the Chromium setup are c/p from the chromium_roll +# script -> unify. +class CheckChromium(Step): + MESSAGE = "Check the chromium checkout." + + def Run(self): + self["chrome_path"] = self._options.chromium + + +class SwitchChromium(Step): + MESSAGE = "Switch to Chromium checkout." + REQUIRES = "chrome_path" + + def RunStep(self): + self["v8_path"] = os.getcwd() + os.chdir(self["chrome_path"]) + # Check for a clean workdir. + if not self.GitIsWorkdirClean(): # pragma: no cover + self.Die("Workspace is not clean. Please commit or undo your changes.") + # Assert that the DEPS file is there. + if not os.path.exists(self.Config(DEPS_FILE)): # pragma: no cover + self.Die("DEPS file not present.") + + +class UpdateChromiumCheckout(Step): + MESSAGE = "Update the checkout and create a new branch." + REQUIRES = "chrome_path" + + def RunStep(self): + os.chdir(self["chrome_path"]) + self.GitCheckout("master") + self.GitPull() + self.GitCreateBranch(self.Config(BRANCHNAME)) + + +class RetrieveChromiumV8Releases(Step): + MESSAGE = "Retrieve V8 releases from Chromium DEPS." + REQUIRES = "chrome_path" + + def RunStep(self): + os.chdir(self["chrome_path"]) + + trunk_releases = filter(lambda r: r["branch"] == "trunk", self["releases"]) + if not trunk_releases: # pragma: no cover + print "No trunk releases detected. Skipping chromium history." + return True + + oldest_v8_rev = int(trunk_releases[-1]["revision"]) + + cr_releases = [] + try: + for git_hash in self.GitLog(format="%H", grep="V8").splitlines(): + if self._config[DEPS_FILE] not in self.GitChangedFiles(git_hash): + continue + if not self.GitCheckoutFileSafe(self._config[DEPS_FILE], git_hash): + break # pragma: no cover + deps = FileToText(self.Config(DEPS_FILE)) + match = DEPS_RE.search(deps) + if match: + svn_rev = self.GitSVNFindSVNRev(git_hash) + v8_rev = match.group(1) + cr_releases.append([svn_rev, v8_rev]) + + # Stop after reaching beyond the last v8 revision we want to update. + # We need a small buffer for possible revert/reland frenzies. + # TODO(machenbach): Subtraction is not git friendly. + if int(v8_rev) < oldest_v8_rev - 100: + break # pragma: no cover + + # Allow Ctrl-C interrupt. 
+ except (KeyboardInterrupt, SystemExit): # pragma: no cover + pass + + # Clean up. + self.GitCheckoutFileSafe(self._config[DEPS_FILE], "HEAD") + + # Add the chromium ranges to the v8 trunk releases. + all_ranges = BuildRevisionRanges(cr_releases) + trunk_dict = dict((r["revision"], r) for r in trunk_releases) + for revision, ranges in all_ranges.iteritems(): + trunk_dict.get(revision, {})["chromium_revision"] = ranges + + +# TODO(machenbach): Unify common code with method above. +class RietrieveChromiumBranches(Step): + MESSAGE = "Retrieve Chromium branch information." + REQUIRES = "chrome_path" + + def RunStep(self): + os.chdir(self["chrome_path"]) + + trunk_releases = filter(lambda r: r["branch"] == "trunk", self["releases"]) + if not trunk_releases: # pragma: no cover + print "No trunk releases detected. Skipping chromium history." + return True + + oldest_v8_rev = int(trunk_releases[-1]["revision"]) + + # Filter out irrelevant branches. + branches = filter(lambda r: re.match(r"branch-heads/\d+", r), + self.GitRemotes()) + + # Transform into pure branch numbers. + branches = map(lambda r: int(re.match(r"branch-heads/(\d+)", r).group(1)), + branches) + + branches = sorted(branches, reverse=True) + + cr_branches = [] + try: + for branch in branches: + if not self.GitCheckoutFileSafe(self._config[DEPS_FILE], + "branch-heads/%d" % branch): + break # pragma: no cover + deps = FileToText(self.Config(DEPS_FILE)) + match = DEPS_RE.search(deps) + if match: + v8_rev = match.group(1) + cr_branches.append([str(branch), v8_rev]) + + # Stop after reaching beyond the last v8 revision we want to update. + # We need a small buffer for possible revert/reland frenzies. + # TODO(machenbach): Subtraction is not git friendly. + if int(v8_rev) < oldest_v8_rev - 100: + break # pragma: no cover + + # Allow Ctrl-C interrupt. + except (KeyboardInterrupt, SystemExit): # pragma: no cover + pass + + # Clean up. + self.GitCheckoutFileSafe(self._config[DEPS_FILE], "HEAD") + + # Add the chromium branches to the v8 trunk releases. + all_ranges = BuildRevisionRanges(cr_branches) + trunk_dict = dict((r["revision"], r) for r in trunk_releases) + for revision, ranges in all_ranges.iteritems(): + trunk_dict.get(revision, {})["chromium_branch"] = ranges + + +class SwitchV8(Step): + MESSAGE = "Returning to V8 checkout." + REQUIRES = "chrome_path" + + def RunStep(self): + self.GitCheckout("master") + self.GitDeleteBranch(self.Config(BRANCHNAME)) + os.chdir(self["v8_path"]) + + +class CleanUp(Step): + MESSAGE = "Clean up." + + def RunStep(self): + self.CommonCleanup() + + +class WriteOutput(Step): + MESSAGE = "Print output." + + def Run(self): + if self._options.csv: + with open(self._options.csv, "w") as f: + writer = csv.DictWriter(f, + ["version", "branch", "revision", + "chromium_revision", "patches_merged"], + restval="", + extrasaction="ignore") + for release in self["releases"]: + writer.writerow(release) + if self._options.json: + with open(self._options.json, "w") as f: + f.write(json.dumps(self["releases"])) + if not self._options.csv and not self._options.json: + print self["releases"] # pragma: no cover + + +class Releases(ScriptsBase): + def _PrepareOptions(self, parser): + parser.add_argument("-b", "--branch", default="recent", + help=("The branch to analyze. If 'all' is specified, " + "analyze all branches. 
If 'recent' (default) " + "is specified, track beta, stable and trunk.")) + parser.add_argument("-c", "--chromium", + help=("The path to your Chromium src/ " + "directory to automate the V8 roll.")) + parser.add_argument("--csv", help="Path to a CSV file for export.") + parser.add_argument("-m", "--max-releases", type=int, default=0, + help="The maximum number of releases to track.") + parser.add_argument("--json", help="Path to a JSON file for export.") + + def _ProcessOptions(self, options): # pragma: no cover + return True + + def _Steps(self): + return [ + Preparation, + RetrieveV8Releases, + CheckChromium, + SwitchChromium, + UpdateChromiumCheckout, + RetrieveChromiumV8Releases, + RietrieveChromiumBranches, + SwitchV8, + CleanUp, + WriteOutput, + ] + + +if __name__ == "__main__": # pragma: no cover + sys.exit(Releases(CONFIG).Run()) diff --git a/deps/v8/tools/push-to-trunk/test_scripts.py b/deps/v8/tools/push-to-trunk/test_scripts.py index 9107db97e..bc79cfd5d 100644 --- a/deps/v8/tools/push-to-trunk/test_scripts.py +++ b/deps/v8/tools/push-to-trunk/test_scripts.py @@ -34,6 +34,7 @@ import unittest import auto_push from auto_push import CheckLastPush from auto_push import SETTINGS_LOCATION +import auto_roll import common_includes from common_includes import * import merge_to_branch @@ -44,13 +45,14 @@ import chromium_roll from chromium_roll import CHROMIUM from chromium_roll import DEPS_FILE from chromium_roll import ChromiumRoll +import releases +from releases import Releases TEST_CONFIG = { BRANCHNAME: "test-prepare-push", TRUNKBRANCH: "test-trunk-push", PERSISTFILE_BASENAME: "/tmp/test-v8-push-to-trunk-tempfile", - TEMP_BRANCH: "test-prepare-push-temporary-branch-created-by-script", DOT_GIT_LOCATION: None, VERSION_FILE: None, CHANGELOG_FILE: None, @@ -74,6 +76,38 @@ AUTO_PUSH_ARGS = [ class ToplevelTest(unittest.TestCase): + def testSortBranches(self): + S = releases.SortBranches + self.assertEquals(["3.1", "2.25"], S(["2.25", "3.1"])[0:2]) + self.assertEquals(["3.0", "2.25"], S(["2.25", "3.0", "2.24"])[0:2]) + self.assertEquals(["3.11", "3.2"], S(["3.11", "3.2", "2.24"])[0:2]) + + def testFilterDuplicatesAndReverse(self): + F = releases.FilterDuplicatesAndReverse + self.assertEquals([], F([])) + self.assertEquals([["100", "10"]], F([["100", "10"]])) + self.assertEquals([["99", "9"], ["100", "10"]], + F([["100", "10"], ["99", "9"]])) + self.assertEquals([["98", "9"], ["100", "10"]], + F([["100", "10"], ["99", "9"], ["98", "9"]])) + self.assertEquals([["98", "9"], ["99", "10"]], + F([["100", "10"], ["99", "10"], ["98", "9"]])) + + def testBuildRevisionRanges(self): + B = releases.BuildRevisionRanges + self.assertEquals({}, B([])) + self.assertEquals({"10": "100"}, B([["100", "10"]])) + self.assertEquals({"10": "100", "9": "99:99"}, + B([["100", "10"], ["99", "9"]])) + self.assertEquals({"10": "100", "9": "97:99"}, + B([["100", "10"], ["98", "9"], ["97", "9"]])) + self.assertEquals({"10": "100", "9": "99:99", "3": "91:98"}, + B([["100", "10"], ["99", "9"], ["91", "3"]])) + self.assertEquals({"13": "101", "12": "100:100", "9": "94:97", + "3": "91:93, 98:99"}, + B([["101", "13"], ["100", "12"], ["98", "3"], + ["94", "9"], ["91", "3"]])) + def testMakeComment(self): self.assertEquals("# Line 1\n# Line 2\n#", MakeComment(" Line 1\n Line 2\n")) @@ -261,7 +295,7 @@ class SimpleMock(object): # arguments. 
if len(args) > len(expected_call['args']): raise NoRetryException("When calling %s with arguments, the " - "expectations must consist of at least as many arguments.") + "expectations must consist of at least as many arguments." % name) # Compare expected and actual arguments. for (expected_arg, actual_arg) in zip(expected_call['args'], args): @@ -296,14 +330,14 @@ class ScriptTest(unittest.TestCase): self._tmp_files.append(name) return name - def WriteFakeVersionFile(self, build=4): + def WriteFakeVersionFile(self, minor=22, build=4, patch=0): with open(TEST_CONFIG[VERSION_FILE], "w") as f: f.write(" // Some line...\n") f.write("\n") f.write("#define MAJOR_VERSION 3\n") - f.write("#define MINOR_VERSION 22\n") + f.write("#define MINOR_VERSION %s\n" % minor) f.write("#define BUILD_NUMBER %s\n" % build) - f.write("#define PATCH_LEVEL 0\n") + f.write("#define PATCH_LEVEL %s\n" % patch) f.write(" // Some line...\n") f.write("#define IS_CANDIDATE_VERSION 0\n") @@ -400,10 +434,8 @@ class ScriptTest(unittest.TestCase): Git("status -s -uno", ""), Git("status -s -b -uno", "## some_branch"), Git("svn fetch", ""), - Git("branch", " branch1\n* %s" % TEST_CONFIG[TEMP_BRANCH]), - Git("branch -D %s" % TEST_CONFIG[TEMP_BRANCH], ""), - Git("checkout -b %s" % TEST_CONFIG[TEMP_BRANCH], ""), - Git("branch", ""), + Git("branch", " branch1\n* %s" % TEST_CONFIG[BRANCHNAME]), + Git("branch -D %s" % TEST_CONFIG[BRANCHNAME], ""), ]) self.ExpectReadline([RL("Y")]) self.MakeStep().CommonPrepare() @@ -415,7 +447,7 @@ class ScriptTest(unittest.TestCase): Git("status -s -uno", ""), Git("status -s -b -uno", "## some_branch"), Git("svn fetch", ""), - Git("branch", " branch1\n* %s" % TEST_CONFIG[TEMP_BRANCH]), + Git("branch", " branch1\n* %s" % TEST_CONFIG[BRANCHNAME]), ]) self.ExpectReadline([RL("n")]) self.MakeStep().CommonPrepare() @@ -427,8 +459,8 @@ class ScriptTest(unittest.TestCase): Git("status -s -uno", ""), Git("status -s -b -uno", "## some_branch"), Git("svn fetch", ""), - Git("branch", " branch1\n* %s" % TEST_CONFIG[TEMP_BRANCH]), - Git("branch -D %s" % TEST_CONFIG[TEMP_BRANCH], None), + Git("branch", " branch1\n* %s" % TEST_CONFIG[BRANCHNAME]), + Git("branch -D %s" % TEST_CONFIG[BRANCHNAME], None), ]) self.ExpectReadline([RL("Y")]) self.MakeStep().CommonPrepare() @@ -692,8 +724,6 @@ Performance and stability improvements on all platforms.""", commit) Git("status -s -b -uno", "## some_branch\n"), Git("svn fetch", ""), Git("branch", " branch1\n* branch2\n"), - Git("checkout -b %s" % TEST_CONFIG[TEMP_BRANCH], ""), - Git("branch", " branch1\n* branch2\n"), Git("branch", " branch1\n* branch2\n"), Git("checkout -b %s svn/bleeding_edge" % TEST_CONFIG[BRANCHNAME], ""), Git("svn find-rev r123455", "push_hash\n"), @@ -726,7 +756,6 @@ Performance and stability improvements on all platforms.""", commit) Git("svn dcommit 2>&1", "Some output\nCommitted r123456\nSome output\n"), Git("svn tag 3.22.5 -m \"Tagging version 3.22.5\"", ""), Git("checkout -f some_branch", ""), - Git("branch -D %s" % TEST_CONFIG[TEMP_BRANCH], ""), Git("branch -D %s" % TEST_CONFIG[BRANCHNAME], ""), Git("branch -D %s" % TEST_CONFIG[TRUNKBRANCH], ""), ]) @@ -768,8 +797,15 @@ Performance and stability improvements on all platforms.""", commit) def testPushToTrunkForced(self): self._PushToTrunk(force=True) - def _ChromiumRoll(self, force=False, manual=False): + googlers_mapping_py = "%s-mapping.py" % TEST_CONFIG[PERSISTFILE_BASENAME] + with open(googlers_mapping_py, "w") as f: + f.write(""" +def list_to_dict(entries): + return {"g_name@google.com": 
"c_name@chromium.org"} +def get_list(): + pass""") + TEST_CONFIG[DOT_GIT_LOCATION] = self.MakeEmptyTempFile() if not os.path.exists(TEST_CONFIG[CHROMIUM]): os.makedirs(TEST_CONFIG[CHROMIUM]) @@ -783,7 +819,7 @@ Performance and stability improvements on all platforms.""", commit) Git("status -s -b -uno", "## some_branch\n"), Git("svn fetch", ""), Git(("log -1 --format=%H --grep=" - "\"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]* (based\" " + "\"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]*\" " "svn/trunk"), "push_hash\n"), Git("svn find-rev push_hash", "123455\n"), Git("log -1 --format=%s push_hash", @@ -794,23 +830,30 @@ Performance and stability improvements on all platforms.""", commit) Git("checkout -b v8-roll-123455", ""), Git(("commit -am \"Update V8 to version 3.22.5 " "(based on bleeding_edge revision r123454).\n\n" - "TBR=reviewer@chromium.org\""), + "Please reply to the V8 sheriff c_name@chromium.org in " + "case of problems.\n\nTBR=c_name@chromium.org\""), ""), Git(("cl upload --send-mail --email \"author@chromium.org\"%s" % force_flag), ""), ]) + self.ExpectReadURL([ + URL("https://chromium-build.appspot.com/p/chromium/sheriff_v8.js", + "document.write('g_name')"), + ]) + # Expected keyboard input in manual mode: if manual: self.ExpectReadline([ - RL("reviewer@chromium.org"), # Chromium reviewer. + RL("c_name@chromium.org"), # Chromium reviewer. ]) # Expected keyboard input in semi-automatic mode and forced mode: if not manual: self.ExpectReadline([]) - args = ["-a", "author@chromium.org", "-c", TEST_CONFIG[CHROMIUM]] + args = ["-a", "author@chromium.org", "-c", TEST_CONFIG[CHROMIUM], + "--sheriff", "--googlers-mapping", googlers_mapping_py] if force: args.append("-f") if manual: args.append("-m") else: args += ["-r", "reviewer@chromium.org"] @@ -908,6 +951,70 @@ Performance and stability improvements on all platforms.""", commit) auto_push.AutoPush(TEST_CONFIG, self).Run(AUTO_PUSH_ARGS) self.assertRaises(Exception, RunAutoPush) + def testAutoRollExistingRoll(self): + self.ExpectReadURL([ + URL("https://codereview.chromium.org/search", + "owner=author%40chromium.org&limit=30&closed=3&format=json", + ("{\"results\": [{\"subject\": \"different\"}," + "{\"subject\": \"Update V8 to Version...\"}]}")), + ]) + + result = auto_roll.AutoRoll(TEST_CONFIG, self).Run( + AUTO_PUSH_ARGS + ["-c", TEST_CONFIG[CHROMIUM]]) + self.assertEquals(1, result) + + # Snippet from the original DEPS file. 
+ FAKE_DEPS = """ +vars = { + "v8_revision": "123455", +} +deps = { + "src/v8": + (Var("googlecode_url") % "v8") + "/" + Var("v8_branch") + "@" + + Var("v8_revision"), +} +""" + + def testAutoRollUpToDate(self): + self.ExpectReadURL([ + URL("https://codereview.chromium.org/search", + "owner=author%40chromium.org&limit=30&closed=3&format=json", + ("{\"results\": [{\"subject\": \"different\"}]}")), + URL("http://src.chromium.org/svn/trunk/src/DEPS", + self.FAKE_DEPS), + ]) + + self.ExpectGit([ + Git(("log -1 --format=%H --grep=" + "\"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]*\" " + "svn/trunk"), "push_hash\n"), + Git("svn find-rev push_hash", "123455\n"), + ]) + + result = auto_roll.AutoRoll(TEST_CONFIG, self).Run( + AUTO_PUSH_ARGS + ["-c", TEST_CONFIG[CHROMIUM]]) + self.assertEquals(1, result) + + def testAutoRoll(self): + self.ExpectReadURL([ + URL("https://codereview.chromium.org/search", + "owner=author%40chromium.org&limit=30&closed=3&format=json", + ("{\"results\": [{\"subject\": \"different\"}]}")), + URL("http://src.chromium.org/svn/trunk/src/DEPS", + self.FAKE_DEPS), + ]) + + self.ExpectGit([ + Git(("log -1 --format=%H --grep=" + "\"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]*\" " + "svn/trunk"), "push_hash\n"), + Git("svn find-rev push_hash", "123456\n"), + ]) + + result = auto_roll.AutoRoll(TEST_CONFIG, self).Run( + AUTO_PUSH_ARGS + ["-c", TEST_CONFIG[CHROMIUM], "--roll"]) + self.assertEquals(0, result) + def testMergeToBranch(self): TEST_CONFIG[ALREADY_MERGING_SENTINEL_FILE] = self.MakeEmptyTempFile() TEST_CONFIG[DOT_GIT_LOCATION] = self.MakeEmptyTempFile() @@ -920,7 +1027,7 @@ Performance and stability improvements on all platforms.""", commit) return lambda: self.assertEquals(patch, FileToText(TEST_CONFIG[TEMPORARY_PATCH_FILE])) - msg = """Merged r12345, r23456, r34567, r45678, r56789 into trunk branch. 
+ msg = """Version 3.22.5.1 (merged r12345, r23456, r34567, r45678, r56789) Title4 @@ -930,7 +1037,7 @@ Title3 Title1 -Title5 +Revert "Something" BUG=123,234,345,456,567,v8:123 LOG=N @@ -950,8 +1057,6 @@ LOG=N Git("status -s -b -uno", "## some_branch\n"), Git("svn fetch", ""), Git("branch", " branch1\n* branch2\n"), - Git("checkout -b %s" % TEST_CONFIG[TEMP_BRANCH], ""), - Git("branch", " branch1\n* branch2\n"), Git("checkout -b %s svn/trunk" % TEST_CONFIG[BRANCHNAME], ""), Git("log --format=%H --grep=\"Port r12345\" --reverse svn/bleeding_edge", "hash1\nhash2"), @@ -978,12 +1083,12 @@ LOG=N Git("log -1 --format=%s hash2", "Title2"), Git("log -1 --format=%s hash3", "Title3"), Git("log -1 --format=%s hash1", "Title1"), - Git("log -1 --format=%s hash5", "Title5"), + Git("log -1 --format=%s hash5", "Revert \"Something\""), Git("log -1 hash4", "Title4\nBUG=123\nBUG=234"), Git("log -1 hash2", "Title2\n BUG = v8:123,345"), Git("log -1 hash3", "Title3\nLOG=n\nBUG=567, 456"), - Git("log -1 hash1", "Title1"), - Git("log -1 hash5", "Title5"), + Git("log -1 hash1", "Title1\nBUG="), + Git("log -1 hash5", "Revert \"Something\"\nBUG=none"), Git("log -1 -p hash4", "patch4"), Git("apply --index --reject \"%s\"" % TEST_CONFIG[TEMPORARY_PATCH_FILE], "", cb=VerifyPatch("patch4")), @@ -1006,13 +1111,13 @@ LOG=N Git("cl presubmit", "Presubmit successfull\n"), Git("cl dcommit -f --bypass-hooks", "Closing issue\n", cb=VerifySVNCommit), Git("svn fetch", ""), - Git("log -1 --format=%%H --grep=\"%s\" svn/trunk" % msg, "hash6"), + Git(("log -1 --format=%%H --grep=\"%s\" svn/trunk" + % msg.replace("\"", "\\\"")), "hash6"), Git("svn find-rev hash6", "1324"), Git(("copy -r 1324 https://v8.googlecode.com/svn/trunk " "https://v8.googlecode.com/svn/tags/3.22.5.1 -m " "\"Tagging version 3.22.5.1\""), ""), Git("checkout -f some_branch", ""), - Git("branch -D %s" % TEST_CONFIG[TEMP_BRANCH], ""), Git("branch -D %s" % TEST_CONFIG[BRANCHNAME], ""), ]) @@ -1036,6 +1141,121 @@ LOG=N args += ["-s", "3"] MergeToBranch(TEST_CONFIG, self).Run(args) + def testReleases(self): + json_output = self.MakeEmptyTempFile() + csv_output = self.MakeEmptyTempFile() + TEST_CONFIG[VERSION_FILE] = self.MakeEmptyTempFile() + self.WriteFakeVersionFile() + + TEST_CONFIG[DOT_GIT_LOCATION] = self.MakeEmptyTempFile() + if not os.path.exists(TEST_CONFIG[CHROMIUM]): + os.makedirs(TEST_CONFIG[CHROMIUM]) + def WriteDEPS(revision): + TextToFile("Line\n \"v8_revision\": \"%s\",\n line\n" % revision, + TEST_CONFIG[DEPS_FILE]) + WriteDEPS(567) + + def ResetVersion(minor, build, patch=0): + return lambda: self.WriteFakeVersionFile(minor=minor, + build=build, + patch=patch) + + def ResetDEPS(revision): + return lambda: WriteDEPS(revision) + + self.ExpectGit([ + Git("status -s -uno", ""), + Git("status -s -b -uno", "## some_branch\n"), + Git("svn fetch", ""), + Git("branch", " branch1\n* branch2\n"), + Git("checkout -b %s" % TEST_CONFIG[BRANCHNAME], ""), + Git("branch -r", " svn/3.21\n svn/3.3\n"), + Git("reset --hard svn/3.3", ""), + Git("log --format=%H", "hash1\nhash2"), + Git("diff --name-only hash1 hash1^", ""), + Git("diff --name-only hash2 hash2^", TEST_CONFIG[VERSION_FILE]), + Git("checkout -f hash2 -- %s" % TEST_CONFIG[VERSION_FILE], "", + cb=ResetVersion(3, 1, 1)), + Git("log -1 --format=%B hash2", + "Version 3.3.1.1 (merged 12)\n\nReview URL: fake.com\n"), + Git("log -1 --format=%s hash2", ""), + Git("svn find-rev hash2", "234"), + Git("log -1 --format=%ci hash2", "18:15"), + Git("checkout -f HEAD -- %s" % TEST_CONFIG[VERSION_FILE], "", + 
cb=ResetVersion(22, 5)), + Git("reset --hard svn/3.21", ""), + Git("log --format=%H", "hash3\nhash4\nhash5\n"), + Git("diff --name-only hash3 hash3^", TEST_CONFIG[VERSION_FILE]), + Git("checkout -f hash3 -- %s" % TEST_CONFIG[VERSION_FILE], "", + cb=ResetVersion(21, 2)), + Git("log -1 --format=%B hash3", ""), + Git("log -1 --format=%s hash3", ""), + Git("svn find-rev hash3", "123"), + Git("log -1 --format=%ci hash3", "03:15"), + Git("checkout -f HEAD -- %s" % TEST_CONFIG[VERSION_FILE], "", + cb=ResetVersion(22, 5)), + Git("reset --hard svn/trunk", ""), + Git("log --format=%H", "hash6\n"), + Git("diff --name-only hash6 hash6^", TEST_CONFIG[VERSION_FILE]), + Git("checkout -f hash6 -- %s" % TEST_CONFIG[VERSION_FILE], "", + cb=ResetVersion(22, 3)), + Git("log -1 --format=%B hash6", ""), + Git("log -1 --format=%s hash6", ""), + Git("svn find-rev hash6", "345"), + Git("log -1 --format=%ci hash6", ""), + Git("checkout -f HEAD -- %s" % TEST_CONFIG[VERSION_FILE], "", + cb=ResetVersion(22, 5)), + Git("status -s -uno", ""), + Git("checkout -f master", ""), + Git("pull", ""), + Git("checkout -b %s" % TEST_CONFIG[BRANCHNAME], ""), + Git("log --format=%H --grep=\"V8\"", "c_hash1\nc_hash2\n"), + Git("diff --name-only c_hash1 c_hash1^", ""), + Git("diff --name-only c_hash2 c_hash2^", TEST_CONFIG[DEPS_FILE]), + Git("checkout -f c_hash2 -- %s" % TEST_CONFIG[DEPS_FILE], "", + cb=ResetDEPS(345)), + Git("svn find-rev c_hash2", "4567"), + Git("checkout -f HEAD -- %s" % TEST_CONFIG[DEPS_FILE], "", + cb=ResetDEPS(567)), + Git("branch -r", " weird/123\n branch-heads/7\n"), + Git("checkout -f branch-heads/7 -- %s" % TEST_CONFIG[DEPS_FILE], "", + cb=ResetDEPS(345)), + Git("checkout -f HEAD -- %s" % TEST_CONFIG[DEPS_FILE], "", + cb=ResetDEPS(567)), + Git("checkout -f master", ""), + Git("branch -D %s" % TEST_CONFIG[BRANCHNAME], ""), + Git("checkout -f some_branch", ""), + Git("branch -D %s" % TEST_CONFIG[BRANCHNAME], ""), + ]) + + args = ["-c", TEST_CONFIG[CHROMIUM], + "--json", json_output, + "--csv", csv_output, + "--max-releases", "1"] + Releases(TEST_CONFIG, self).Run(args) + + # Check expected output. 
+ csv = ("3.22.3,trunk,345,4567,\r\n" + "3.21.2,3.21,123,,\r\n" + "3.3.1.1,3.3,234,,12\r\n") + self.assertEquals(csv, FileToText(csv_output)) + + expected_json = [ + {"bleeding_edge": "", "patches_merged": "", "version": "3.22.3", + "chromium_revision": "4567", "branch": "trunk", "revision": "345", + "review_link": "", "date": "", "chromium_branch": "7", + "revision_link": "https://code.google.com/p/v8/source/detail?r=345"}, + {"patches_merged": "", "bleeding_edge": "", "version": "3.21.2", + "chromium_revision": "", "branch": "3.21", "revision": "123", + "review_link": "", "date": "03:15", "chromium_branch": "", + "revision_link": "https://code.google.com/p/v8/source/detail?r=123"}, + {"patches_merged": "12", "bleeding_edge": "", "version": "3.3.1.1", + "chromium_revision": "", "branch": "3.3", "revision": "234", + "review_link": "fake.com", "date": "18:15", "chromium_branch": "", + "revision_link": "https://code.google.com/p/v8/source/detail?r=234"}, + ] + self.assertEquals(expected_json, json.loads(FileToText(json_output))) + class SystemTest(unittest.TestCase): def testReload(self): diff --git a/deps/v8/tools/run-deopt-fuzzer.py b/deps/v8/tools/run-deopt-fuzzer.py index b809fdf98..21894ff52 100755 --- a/deps/v8/tools/run-deopt-fuzzer.py +++ b/deps/v8/tools/run-deopt-fuzzer.py @@ -213,6 +213,8 @@ def BuildOptions(): default= -1, type="int") result.add_option("-v", "--verbose", help="Verbose output", default=False, action="store_true") + result.add_option("--random-seed", default=0, dest="random_seed", + help="Default seed for initializing random generator") return result @@ -242,6 +244,8 @@ def ProcessOptions(options): options.extra_flags = shlex.split(options.extra_flags) if options.j == 0: options.j = multiprocessing.cpu_count() + while options.random_seed == 0: + options.random_seed = random.SystemRandom().randint(-2147483648, 2147483647) if not options.distribution_mode in DISTRIBUTION_MODES: print "Unknown distribution mode %s" % options.distribution_mode return False @@ -362,7 +366,8 @@ def Execute(arch, mode, args, options, suites, workspace): timeout, options.isolates, options.command_prefix, options.extra_flags, - False) + False, + options.random_seed) # Find available test suites and read test cases from them. 
variables = { @@ -373,6 +378,7 @@ def Execute(arch, mode, args, options, suites, workspace): "isolates": options.isolates, "mode": mode, "no_i18n": False, + "no_snap": False, "simulator": utils.UseSimulator(arch), "system": utils.GuessOS(), } diff --git a/deps/v8/tools/run-tests.py b/deps/v8/tools/run-tests.py index cc1d480b4..5cf49049f 100755 --- a/deps/v8/tools/run-tests.py +++ b/deps/v8/tools/run-tests.py @@ -34,6 +34,7 @@ import optparse import os from os.path import join import platform +import random import shlex import subprocess import sys @@ -49,7 +50,7 @@ from testrunner.objects import context ARCH_GUESS = utils.DefaultArch() -DEFAULT_TESTS = ["mjsunit", "cctest", "message", "preparser"] +DEFAULT_TESTS = ["mjsunit", "fuzz-natives", "cctest", "message", "preparser"] TIMEOUT_DEFAULT = 60 TIMEOUT_SCALEFACTOR = {"debug" : 4, "release" : 1 } @@ -79,6 +80,7 @@ SUPPORTED_ARCHS = ["android_arm", "android_ia32", "arm", "ia32", + "mips", "mipsel", "nacl_ia32", "nacl_x64", @@ -89,6 +91,7 @@ SLOW_ARCHS = ["android_arm", "android_arm64", "android_ia32", "arm", + "mips", "mipsel", "nacl_ia32", "nacl_x64", @@ -149,6 +152,9 @@ def BuildOptions(): result.add_option("--no-presubmit", "--nopresubmit", help='Skip presubmit checks', default=False, dest="no_presubmit", action="store_true") + result.add_option("--no-snap", "--nosnap", + help='Test a build compiled without snapshot.', + default=False, dest="no_snap", action="store_true") result.add_option("--no-stress", "--nostress", help="Don't run crankshaft --always-opt --stress-op test", default=False, dest="no_stress", action="store_true") @@ -197,6 +203,8 @@ def BuildOptions(): result.add_option("--junittestsuite", help="The testsuite name in the JUnit output file", default="v8tests") + result.add_option("--random-seed", default=0, dest="random_seed", + help="Default seed for initializing random generator") return result @@ -247,6 +255,9 @@ def ProcessOptions(options): if options.j == 0: options.j = multiprocessing.cpu_count() + while options.random_seed == 0: + options.random_seed = random.SystemRandom().randint(-2147483648, 2147483647) + def excl(*args): """Returns true if zero or one of multiple arguments are true.""" return reduce(lambda x, y: x + y, args) <= 1 @@ -393,7 +404,8 @@ def Execute(arch, mode, args, options, suites, workspace): timeout, options.isolates, options.command_prefix, options.extra_flags, - options.no_i18n) + options.no_i18n, + options.random_seed) # TODO(all): Combine "simulator" and "simulator_run". 
simulator_run = not options.dont_skip_simulator_slow_tests and \ @@ -407,6 +419,7 @@ def Execute(arch, mode, args, options, suites, workspace): "isolates": options.isolates, "mode": mode, "no_i18n": options.no_i18n, + "no_snap": options.no_snap, "simulator_run": simulator_run, "simulator": utils.UseSimulator(arch), "system": utils.GuessOS(), diff --git a/deps/v8/tools/testrunner/local/execution.py b/deps/v8/tools/testrunner/local/execution.py index 4453c0845..f4a40204e 100644 --- a/deps/v8/tools/testrunner/local/execution.py +++ b/deps/v8/tools/testrunner/local/execution.py @@ -171,6 +171,7 @@ class Runner(object): cmd = (self.context.command_prefix + [os.path.abspath(os.path.join(self.context.shell_dir, shell))] + d8testflag + + ["--random-seed=%s" % self.context.random_seed] + test.suite.GetFlagsForTestCase(test, self.context) + self.context.extra_flags) return cmd diff --git a/deps/v8/tools/testrunner/local/statusfile.py b/deps/v8/tools/testrunner/local/statusfile.py index 826b576f2..a45add33d 100644 --- a/deps/v8/tools/testrunner/local/statusfile.py +++ b/deps/v8/tools/testrunner/local/statusfile.py @@ -53,8 +53,8 @@ DEFS = {FAIL_OK: [FAIL, OKAY], # Support arches, modes to be written as keywords instead of strings. VARIABLES = {ALWAYS: True} for var in ["debug", "release", "android_arm", "android_arm64", "android_ia32", - "arm", "arm64", "ia32", "mipsel", "x64", "nacl_ia32", "nacl_x64", - "macos", "windows", "linux"]: + "arm", "arm64", "ia32", "mips", "mipsel", "x64", "nacl_ia32", + "nacl_x64", "macos", "windows", "linux"]: VARIABLES[var] = var diff --git a/deps/v8/tools/testrunner/objects/context.py b/deps/v8/tools/testrunner/objects/context.py index 1f525b76b..68c198924 100644 --- a/deps/v8/tools/testrunner/objects/context.py +++ b/deps/v8/tools/testrunner/objects/context.py @@ -28,7 +28,7 @@ class Context(): def __init__(self, arch, mode, shell_dir, mode_flags, verbose, timeout, - isolates, command_prefix, extra_flags, noi18n): + isolates, command_prefix, extra_flags, noi18n, random_seed): self.arch = arch self.mode = mode self.shell_dir = shell_dir @@ -39,13 +39,16 @@ class Context(): self.command_prefix = command_prefix self.extra_flags = extra_flags self.noi18n = noi18n + self.random_seed = random_seed def Pack(self): return [self.arch, self.mode, self.mode_flags, self.timeout, self.isolates, - self.command_prefix, self.extra_flags, self.noi18n] + self.command_prefix, self.extra_flags, self.noi18n, + self.random_seed] @staticmethod def Unpack(packed): # For the order of the fields, refer to Pack() above. return Context(packed[0], packed[1], None, packed[2], False, - packed[3], packed[4], packed[5], packed[6], packed[7]) + packed[3], packed[4], packed[5], packed[6], packed[7], + packed[8]) |
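
The testBuildRevisionRanges and testFilterDuplicatesAndReverse expectations added to test_scripts.py above pin down how releases.py turns (chromium revision, v8 revision) pairs into per-release chromium revision ranges. The sketch below is not the code from releases.py; it is a minimal reimplementation, consistent only with those test expectations, to show the intended semantics: pairs arrive newest-first, consecutive entries that pin the same v8 revision collapse to their earliest chromium revision, each run then covers chromium revisions up to (but not including) the start of the next run, and the newest run is left open-ended.

# Sketch only: mirrors the semantics implied by the tests above, not the
# actual releases.py implementation.

def FilterDuplicatesAndReverse(cr_releases):
  # Reverse to ascending chromium-revision order and drop consecutive
  # entries that point at the same v8 revision, keeping the earliest one.
  result = []
  for cr_rev, v8_rev in reversed(cr_releases):
    if result and result[-1][1] == v8_rev:
      continue
    result.append([cr_rev, v8_rev])
  return result

def BuildRevisionRanges(cr_releases):
  filtered = FilterDuplicatesAndReverse(cr_releases)
  ranges = {}
  for i, (cr_rev, v8_rev) in enumerate(filtered):
    if i + 1 < len(filtered):
      # Covered until just before the next pinned v8 revision took over.
      interval = "%s:%d" % (cr_rev, int(filtered[i + 1][0]) - 1)
    else:
      interval = cr_rev  # Newest entry: open-ended.
    if v8_rev in ranges:
      ranges[v8_rev] += ", " + interval
    else:
      ranges[v8_rev] = interval
  return ranges

# One of the documented cases from testBuildRevisionRanges:
assert BuildRevisionRanges([["100", "10"], ["99", "9"]]) == \
    {"10": "100", "9": "99:99"}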
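
RetrieveChromiumV8Releases, RietrieveChromiumBranches and the new auto-roll tests all rely on a DEPS_RE regular expression (defined elsewhere in these scripts, not shown in this diff) to pull the pinned v8 revision out of Chromium's DEPS file. A plausible, purely hypothetical pattern that would match the FAKE_DEPS snippet used by testAutoRollUpToDate is shown below; the real expression may differ.

import re

# Hypothetical stand-in for the DEPS_RE used by the push-to-trunk scripts.
DEPS_RE = re.compile(r'"v8_revision":\s*"(\d+)"')

FAKE_DEPS = '''
vars = {
  "v8_revision": "123455",
}
'''

match = DEPS_RE.search(FAKE_DEPS)
assert match and match.group(1) == "123455"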
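
WriteOutput in releases.py produces the CSV checked by testReleases with csv.DictWriter: restval="" turns fields missing from a release dict into empty cells, extrasaction="ignore" silently drops extra keys such as review_link or date, and no header row is written. A small standalone illustration of that standard-library behaviour, using release dicts modelled on the testReleases expectations (Python 2, like the scripts above; on Python 3 use io.StringIO(newline="") instead of io.BytesIO):

import csv
import io

releases = [
  {"version": "3.22.3", "branch": "trunk", "revision": "345",
   "chromium_revision": "4567", "review_link": "fake.com"},
  {"version": "3.3.1.1", "branch": "3.3", "revision": "234",
   "patches_merged": "12"},
]

buf = io.BytesIO()
writer = csv.DictWriter(buf,
                        ["version", "branch", "revision",
                         "chromium_revision", "patches_merged"],
                        restval="",
                        extrasaction="ignore")
for release in releases:
  writer.writerow(release)

# Missing fields become empty cells; unknown fields (review_link) are dropped.
assert buf.getvalue() == ("3.22.3,trunk,345,4567,\r\n"
                          "3.3.1.1,3.3,234,,12\r\n")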